mirror of
https://github.com/gaseous-project/gaseous-server
synced 2026-04-21 13:27:16 +00:00
Compare commits
137 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b3ccf2f96a | ||
|
|
08e4fd7795 | ||
|
|
7d348a7236 | ||
|
|
d1875bcb19 | ||
|
|
f7044438a1 | ||
|
|
a63f11f9fa | ||
|
|
d1ffca1fef | ||
|
|
6a6806c2b5 | ||
|
|
c8b31f8d35 | ||
|
|
bceafb4137 | ||
|
|
1620282c45 | ||
|
|
ae1c232900 | ||
|
|
6bc1defa41 | ||
|
|
beefdce15c | ||
|
|
81cdfcfd7f | ||
|
|
04d427cbeb | ||
|
|
d2874c7474 | ||
|
|
997a36e427 | ||
|
|
17a4d2e70b | ||
|
|
62d25034f5 | ||
|
|
9aa6be0494 | ||
|
|
1f854cc080 | ||
|
|
965246b4ed | ||
|
|
65a1cf1d84 | ||
|
|
c97ae4c08c | ||
|
|
33a9e8caa1 | ||
|
|
70dff5199f | ||
|
|
a17103ab4e | ||
|
|
6cb041b738 | ||
|
|
0e15682f20 | ||
|
|
d0814f956f | ||
|
|
707bf87de6 | ||
|
|
e468b00fba | ||
|
|
13dd37598f | ||
|
|
2b09f63084 | ||
|
|
3120753b1a | ||
|
|
b791f835ef | ||
|
|
1585bac3f5 | ||
|
|
30174e9172 | ||
|
|
cead7813c8 | ||
|
|
5bd81b1526 | ||
|
|
2a7db700e4 | ||
|
|
6a1b3cc579 | ||
|
|
da16a6b78c | ||
|
|
1f9dbf462f | ||
|
|
a1640f12a8 | ||
|
|
dacf55c92a | ||
|
|
9f30318262 | ||
|
|
f63a37caec | ||
|
|
ab20102a3a | ||
|
|
83886889f6 | ||
|
|
945e0cf3be | ||
|
|
30871984b9 | ||
|
|
eccf856ffd | ||
|
|
5f6afb3abd | ||
|
|
f4b05074ce | ||
|
|
a39af56f4a | ||
|
|
ac05dec573 | ||
|
|
c938271555 | ||
|
|
41e9e65b13 | ||
|
|
8cc2568aac | ||
|
|
117d7c4843 | ||
|
|
6bc5c54c5c | ||
|
|
1ef2b5bf00 | ||
|
|
4aa7909273 | ||
|
|
555472b358 | ||
|
|
46e5734b15 | ||
|
|
eee2a3c665 | ||
|
|
399b70c766 | ||
|
|
f80fd15e13 | ||
|
|
de87b6c090 | ||
|
|
a274a6ca2b | ||
|
|
6f467e7cfd | ||
|
|
69ad3346a2 | ||
|
|
40d6e47d98 | ||
|
|
0510b20659 | ||
|
|
ad46aa4f46 | ||
|
|
e1ea877714 | ||
|
|
e1b96618b8 | ||
|
|
4e473d3bf3 | ||
|
|
f6bafe2e28 | ||
|
|
2594b9c91d | ||
|
|
f30de1a818 | ||
|
|
6a576eaf6a | ||
|
|
3a6cd7986e | ||
|
|
1f3df45c99 | ||
|
|
6e132d528c | ||
|
|
94841bc07b | ||
|
|
750a5a2390 | ||
|
|
dfe7bda815 | ||
|
|
0b5b49d7d1 | ||
|
|
bb5c5cd1b9 | ||
|
|
2a980e76e2 | ||
|
|
299ae8e337 | ||
|
|
ea25b58fcb | ||
|
|
92cdd8b6cf | ||
|
|
00f58bf561 | ||
|
|
66716eadff | ||
|
|
02c1957e9c | ||
|
|
8e7ada7090 | ||
|
|
e375f20782 | ||
|
|
d668eef0e8 | ||
|
|
ee85a2be32 | ||
|
|
c95e623365 | ||
|
|
bf43876ecc | ||
|
|
5efbd0072f | ||
|
|
38336e37ae | ||
|
|
ed77b18c7d | ||
|
|
b39bfb9099 | ||
|
|
a2040cb43c | ||
|
|
b101a245cc | ||
|
|
d2c51463f0 | ||
|
|
7c5079fffb | ||
|
|
9aa35326b5 | ||
|
|
fef60c2849 | ||
|
|
775ee9b2b7 | ||
|
|
e5501b5727 | ||
|
|
21188e28bb | ||
|
|
bf7aa19807 | ||
|
|
6a952093a7 | ||
|
|
8ed954f58e | ||
|
|
c97f1199cb | ||
|
|
2d63a1c416 | ||
|
|
146597dd4b | ||
|
|
f4ba84a54f | ||
|
|
64fb76484b | ||
|
|
bfade006bd | ||
|
|
c8140d7178 | ||
|
|
070589f718 | ||
|
|
bb86cb52f6 | ||
|
|
7dfb0b54eb | ||
|
|
f0783fcae8 | ||
|
|
68be24d514 | ||
|
|
a5da1a9033 | ||
|
|
fc09681cdd | ||
|
|
6185912151 | ||
|
|
deef919d5b |
594 changed files with 78979 additions and 24054 deletions
|
|
@ -1,4 +0,0 @@
|
|||
DATABASE_HOST=mariadb
|
||||
DATABASE_USER=root
|
||||
DATABASE_PASSWORD=gaseous
|
||||
DATABASE_DB=gaseous
|
||||
|
|
@ -1,6 +1,7 @@
|
|||
FROM mcr.microsoft.com/devcontainers/dotnet:1-8.0-bookworm
|
||||
FROM mcr.microsoft.com/devcontainers/dotnet:10.0-noble
|
||||
|
||||
RUN apt-get update && apt-get install -y p7zip-full
|
||||
RUN mkdir -p /workspace/gaseous-server/wwwroot/emulators/EmulatorJS
|
||||
RUN wget https://cdn.emulatorjs.org/releases/4.0.11.7z
|
||||
RUN 7z x -y -o/workspace/gaseous-server/wwwroot/emulators/EmulatorJS 4.0.11.7z
|
||||
# update apt-get
|
||||
RUN apt-get update && curl -fsSL https://deb.nodesource.com/setup_20.x | bash -
|
||||
|
||||
# download and unzip EmulatorJS from CDN
|
||||
RUN apt-get install -y p7zip-full default-jdk nodejs wget mariadb-client ffmpeg && apt-get upgrade -y && apt-get clean
|
||||
|
|
|
|||
|
|
@ -1,27 +1,25 @@
|
|||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
|
||||
// README at: https://github.com/devcontainers/templates/tree/main/src/dotnet
|
||||
{
|
||||
"name": "C# (.NET)",
|
||||
"name": "Gaseous C# (.NET)",
|
||||
// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
|
||||
//"image": "mcr.microsoft.com/devcontainers/dotnet:1-8.0-bookworm",
|
||||
"dockerComposeFile": "docker-compose.yml",
|
||||
"service": "development",
|
||||
"workspaceFolder": "/workspace",
|
||||
|
||||
// Features to add to the dev container. More info: https://containers.dev/features.
|
||||
// "features": {},
|
||||
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
"forwardPorts": [5198],
|
||||
"forwardPorts": [
|
||||
5198
|
||||
],
|
||||
"portsAttributes": {
|
||||
"5198": {
|
||||
"protocol": "http"
|
||||
}
|
||||
"5198": {
|
||||
"protocol": "http"
|
||||
}
|
||||
},
|
||||
|
||||
// Use 'postCreateCommand' to run commands after the container is created.
|
||||
"postCreateCommand": "dotnet restore",
|
||||
|
||||
"postCreateCommand": "dotnet restore && bash /workspace/build/scripts/get-ejs-git.sh",
|
||||
// Configure tool-specific properties.
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
|
|
@ -35,12 +33,16 @@
|
|||
"GitHub.vscode-pull-request-github",
|
||||
"AndersEAndersen.html-class-suggestions",
|
||||
"george-alisson.html-preview-vscode",
|
||||
"ms-dotnettools.vscodeintellicode-csharp",
|
||||
"Zignd.html-css-class-completion"
|
||||
"Zignd.html-css-class-completion",
|
||||
"PWABuilder.pwa-studio",
|
||||
"ms-azuretools.vscode-docker",
|
||||
"SonarSource.sonarlint-vscode",
|
||||
"oderwat.indent-rainbow",
|
||||
"GitHub.copilot-chat",
|
||||
"eamodio.gitlens"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
|
||||
// "remoteUser": "root"
|
||||
}
|
||||
}
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
services:
|
||||
development:
|
||||
build:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
volumes:
|
||||
|
|
@ -11,13 +11,15 @@ services:
|
|||
- dbhost=${DATABASE_HOST}
|
||||
- dbuser=${DATABASE_USER}
|
||||
- dbpass=${DATABASE_PASSWORD}
|
||||
- igdbclientid=<clientid>
|
||||
- igdbclientsecret=<clientsecret>
|
||||
- igdbclientid=${IGDB_CLIENT_ID}
|
||||
- igdbclientsecret=${IGDB_CLIENT_SECRET}
|
||||
mariadb:
|
||||
hostname: mariadb
|
||||
image: mariadb:latest
|
||||
ports:
|
||||
- 3306:3306
|
||||
environment:
|
||||
- MARIADB_ROOT_PASSWORD=${DATABASE_PASSWORD}
|
||||
- MARIADB_DATABASE=${DATABASE_DB}
|
||||
- MARIADB_USER=${DATABASE_USER}
|
||||
- MARIADB_PASSWORD=${DATABASE_PASSWORD}
|
||||
- MARIADB_PASSWORD=${DATABASE_PASSWORD}
|
||||
|
|
|
|||
16
.github/dependabot.yml
vendored
16
.github/dependabot.yml
vendored
|
|
@ -5,11 +5,23 @@
|
|||
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "nuget" # See documentation for possible values
|
||||
directory: "/" # Location of package manifests
|
||||
- package-ecosystem: "nuget"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
target-branch: "main"
|
||||
- package-ecosystem: "nuget"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
target-branch: "branch-v1.7.0"
|
||||
- package-ecosystem: "devcontainers"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
target-branch: "main"
|
||||
- package-ecosystem: "devcontainers"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
target-branch: "branch-v1.7.0"
|
||||
|
|
|
|||
4
.github/release.yml
vendored
4
.github/release.yml
vendored
|
|
@ -8,12 +8,16 @@ changelog:
|
|||
- note
|
||||
- bug
|
||||
- dependencies
|
||||
- experimental
|
||||
- title: Notes
|
||||
labels:
|
||||
- note
|
||||
- title: Bug Fixes
|
||||
labels:
|
||||
- bug
|
||||
- title: Experimental Features
|
||||
labels:
|
||||
- experimental
|
||||
- title: Dependencies
|
||||
labels:
|
||||
- dependencies
|
||||
|
|
|
|||
211
.github/scripts/generate_glossary_table.sh
vendored
Normal file
211
.github/scripts/generate_glossary_table.sh
vendored
Normal file
|
|
@ -0,0 +1,211 @@
|
|||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Regenerates the glossary core terms table between markers
|
||||
# <!-- GLOSSARY_TABLE_START --> and <!-- GLOSSARY_TABLE_END --> in docs/LOCALISATION_GLOSSARY.md
|
||||
# Terms sourced from a curated list below; translations pulled from base locale JSON files when keys exist.
|
||||
# Falls back to hard-coded mapping if specific concept not represented as a direct key.
|
||||
|
||||
GLOSSARY_FILE="docs/Localisation-Glossary.md"
|
||||
LOCALE_DIR="gaseous-lib/Support/Localisation"
|
||||
START_MARKER="<!-- GLOSSARY_TABLE_START -->"
|
||||
END_MARKER="<!-- GLOSSARY_TABLE_END -->"
|
||||
|
||||
BASES=(en fr de pt)
|
||||
|
||||
TERMS=(
|
||||
"settings"
|
||||
"preferences"
|
||||
"upload"
|
||||
"download"
|
||||
"library"
|
||||
"home"
|
||||
"favorites"
|
||||
"play"
|
||||
"delete"
|
||||
"add"
|
||||
"emulator"
|
||||
"firmware"
|
||||
"metadata"
|
||||
"signature"
|
||||
"rom"
|
||||
"media"
|
||||
"screenshot"
|
||||
"saved_game"
|
||||
"user"
|
||||
"username"
|
||||
"password"
|
||||
"two_factor_authentication"
|
||||
"error"
|
||||
"warning"
|
||||
"information"
|
||||
"search"
|
||||
"filter"
|
||||
"platform"
|
||||
"genre"
|
||||
"theme"
|
||||
"rating"
|
||||
"player"
|
||||
"collection"
|
||||
"size"
|
||||
"summary"
|
||||
"description"
|
||||
"link"
|
||||
"source"
|
||||
"logs"
|
||||
"task"
|
||||
"maintenance"
|
||||
"database"
|
||||
"import"
|
||||
"export"
|
||||
"reset"
|
||||
"background"
|
||||
"enabled"
|
||||
"disabled"
|
||||
"pending"
|
||||
"processing"
|
||||
"complete"
|
||||
"failed"
|
||||
"session"
|
||||
"game"
|
||||
"cover_art"
|
||||
)
|
||||
|
||||
# Mapping of concept -> locale key(s) to search.
|
||||
# Some concepts map to banner.* or generic.* keys; provide ordered candidates per concept.
|
||||
# Format: concept:key1,key2,...
|
||||
declare -A CONCEPT_KEYS=(
|
||||
[settings]="banner.settings;card.settings.header"
|
||||
[preferences]="banner.preferences"
|
||||
[upload]="banner.upload;generic.upload_complete"
|
||||
[download]="generic.download"
|
||||
[library]="banner.library"
|
||||
[home]="banner.home"
|
||||
[favorites]="home.favourites;home.favorites"
|
||||
[play]="generic.play;card.game.play_button_label"
|
||||
[delete]="generic.delete;card.management.delete"
|
||||
[add]="generic.add"
|
||||
[emulator]="card.buttons.emulator"
|
||||
[firmware]="card.settings.menu.firmware"
|
||||
[metadata]="card.management.metadata"
|
||||
[signature]="datasources.signature_source_label"
|
||||
[rom]="card.tabs.roms"
|
||||
[media]="card.rom.media_prefix"
|
||||
[screenshot]="generic.screenshot"
|
||||
[saved_game]="home.saved_games"
|
||||
[user]="usersettings.new_user_button"
|
||||
[username]="accountmodal.section.username_header"
|
||||
[password]="accountmodal.current_password_label"
|
||||
[two_factor_authentication]="accountmodal.tab.two_factor_authentication"
|
||||
[error]="generic.error;console.error_generic"
|
||||
[warning]="logs.filter.warning"
|
||||
[information]="logs.filter.information"
|
||||
[search]="filtering.title_search"
|
||||
[filter]="collection.edit.filter_header"
|
||||
[platform]="card.settings.menu.platforms"
|
||||
[genre]="collection.edit.genres_header"
|
||||
[theme]="collection.edit.themes_header"
|
||||
[rating]="card.rating.label"
|
||||
[player]="usereditmodal.user_role.player_option"
|
||||
[collection]="collection.edit.collection_header"
|
||||
[size]="card.rom.size_prefix"
|
||||
[summary]="card.game.summary_header"
|
||||
[description]="card.game.description_prefix"
|
||||
[link]="card.metadata.link_label"
|
||||
[source]="card.metadata.source_label"
|
||||
[logs]="card.settings.menu.logs"
|
||||
[task]="task.library_scan"
|
||||
[maintenance]="task.weekly_maintenance"
|
||||
[database]="homesettings.database_header"
|
||||
[import]="banner.upload"
|
||||
[export]="platforms.button.export_json"
|
||||
[reset]="card.emulator.reset_to_default"
|
||||
[background]="process.background"
|
||||
[enabled]="generic.enabled"
|
||||
[disabled]="generic.disabled"
|
||||
[pending]="uploadrommodal.status.pending"
|
||||
[processing]="uploadrommodal.status.processing"
|
||||
[complete]="uploadrommodal.status.complete"
|
||||
[failed]="uploadrommodal.status.failed"
|
||||
[session]="console.failed_create_session"
|
||||
[game]="first2page.metadata.igdb_option;index.title"
|
||||
[cover_art]="card.game.cover_art_alt"
|
||||
)
|
||||
|
||||
function extract_term() {
|
||||
local concept="${1:-}" loc="${2:-}"
|
||||
if [ -z "$loc" ]; then echo ""; return; fi
|
||||
local file="$LOCALE_DIR/$loc.json"
|
||||
if [ ! -f "$file" ]; then echo ""; return; fi
|
||||
local keys_string="${CONCEPT_KEYS[$concept]}"
|
||||
IFS=';' read -r -a candidates <<< "$keys_string"
|
||||
for k in "${candidates[@]}"; do
|
||||
# jq path lookup
|
||||
local value=$(jq -r --arg key "$k" '.strings[$key] // .serverstrings[$key] // empty' "$file")
|
||||
if [ -n "$value" ] && [ "$value" != "null" ]; then
|
||||
echo "$value"; return
|
||||
fi
|
||||
done
|
||||
echo "" # fallback blank; script may later substitute manual mapping
|
||||
}
|
||||
|
||||
# Manual fallback map for concepts not present; keyed by concept:en|fr|de|pt
|
||||
declare -A MANUAL_FALLBACK=(
|
||||
[media]="Media|Support|Medien|Média"
|
||||
[saved_game]="Saved Games|Jeux sauvegardés|Gespeicherte Spiele|Jogos Gravados"
|
||||
[favorites]="Favorites|Favoris|Favoriten|Favoritos"
|
||||
)
|
||||
|
||||
# Build table header
|
||||
TABLE=$'| Key Concept | English | French | German | Portuguese |\n'
|
||||
TABLE+=$'|-------------|---------|--------|--------|------------|\n'
|
||||
|
||||
for term in "${TERMS[@]}"; do
|
||||
en_val=$(extract_term "$term" en)
|
||||
fr_val=$(extract_term "$term" fr)
|
||||
de_val=$(extract_term "$term" de)
|
||||
pt_val=$(extract_term "$term" pt)
|
||||
|
||||
if [ -z "$en_val$fr_val$de_val$pt_val" ] && [ -n "${MANUAL_FALLBACK[$term]:-}" ]; then
|
||||
IFS='|' read -r en_val fr_val de_val pt_val <<< "${MANUAL_FALLBACK[$term]}"
|
||||
fi
|
||||
|
||||
# Normalise common formatting artifacts (trailing colon for description key etc.)
|
||||
en_val=${en_val%:}
|
||||
fr_val=${fr_val%:}
|
||||
de_val=${de_val%:}
|
||||
pt_val=${pt_val%:}
|
||||
|
||||
TABLE+="| ${term//_/ /} | ${en_val:-—} | ${fr_val:-—} | ${de_val:-—} | ${pt_val:-—} |\n"
|
||||
done
|
||||
|
||||
# Safety: ensure markers exist before attempting replacement
|
||||
if ! grep -q "$START_MARKER" "$GLOSSARY_FILE" || ! grep -q "$END_MARKER" "$GLOSSARY_FILE"; then
|
||||
echo "Error: glossary markers not found in $GLOSSARY_FILE. Aborting update." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Replace section between markers using Perl (non-greedy, DOTALL). Preserve surrounding content.
|
||||
ESCAPED_TABLE="$TABLE"
|
||||
if ! perl -0777 -i -pe "s/${START_MARKER}.*?${END_MARKER}/${START_MARKER}\n$ESCAPED_TABLE${END_MARKER}/s" "$GLOSSARY_FILE" 2>/dev/null; then
|
||||
echo "Perl replacement failed, attempting awk fallback" >&2
|
||||
awk -v start="$START_MARKER" -v end="$END_MARKER" -v repl="$ESCAPED_TABLE" 'BEGIN{infile=""} {infile=infile $0 "\n"} END {
|
||||
# Split keeping newlines
|
||||
n=split(infile, lines, "\n");
|
||||
foundStart=0; foundEnd=0; out=""; inside=0
|
||||
for(i=1;i<=n;i++){
|
||||
line=lines[i]
|
||||
if(line==start){
|
||||
foundStart=1
|
||||
out=out start "\n" repl end "\n"
|
||||
# Skip until end marker encountered
|
||||
inside=1; continue
|
||||
}
|
||||
if(line==end){ foundEnd=1; inside=0; continue }
|
||||
if(!inside){ out=out line "\n" }
|
||||
}
|
||||
if(foundStart && foundEnd){ printf "%s", out } else { printf "%s", infile > "/dev/stderr"; exit 2 }
|
||||
}' "$GLOSSARY_FILE" > "$GLOSSARY_FILE.tmp" && mv "$GLOSSARY_FILE.tmp" "$GLOSSARY_FILE"
|
||||
fi
|
||||
|
||||
echo "Glossary table updated successfully in $GLOSSARY_FILE"
|
||||
69
.github/scripts/generate_locale_table.sh
vendored
Normal file
69
.github/scripts/generate_locale_table.sh
vendored
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Generates the locale table section in docs/LOCALISATION.MD between
|
||||
# <!-- LOCALE_TABLE_START --> and <!-- LOCALE_TABLE_END --> markers.
|
||||
# Requires: jq
|
||||
|
||||
DOC_FILE="docs/Localisation.md"
|
||||
LOCALE_DIR="gaseous-lib/Support/Localisation"
|
||||
START_MARKER="<!-- LOCALE_TABLE_START -->"
|
||||
END_MARKER="<!-- LOCALE_TABLE_END -->"
|
||||
|
||||
if ! command -v jq >/dev/null 2>&1; then
|
||||
echo "jq is required to generate the locale table." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d "$LOCALE_DIR" ]; then
|
||||
echo "Locale directory '$LOCALE_DIR' not found." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Build table header
|
||||
TABLE=$'| Code | English Name | Native Name | Type | Parent | Pluralisation | Notes |\n'
|
||||
TABLE+=$'|------|--------------|-------------|------|--------|---------------|-------|\n'
|
||||
|
||||
# Iterate over JSON files, sorted for deterministic output
|
||||
for file in $(ls "$LOCALE_DIR"/*.json | sort); do
|
||||
code=$(jq -r '.code // ""' "$file")
|
||||
name=$(jq -r '.name // ""' "$file")
|
||||
nativeName=$(jq -r '.nativeName // ""' "$file")
|
||||
type=$(jq -r '.type // ""' "$file")
|
||||
pluralRuleExists=$(jq -r 'has("pluralRule")' "$file")
|
||||
pluralRulesExists=$(jq -r 'has("pluralRules")' "$file")
|
||||
parent="—"
|
||||
if [ "$type" = "Overlay" ]; then
|
||||
parent=$(echo "$code" | cut -d'-' -f1)
|
||||
fi
|
||||
pluralisation=""
|
||||
if [ "$pluralRuleExists" = "true" ] && [ "$pluralRulesExists" = "true" ]; then
|
||||
pluralisation="pluralRule + pluralRules"
|
||||
elif [ "$pluralRulesExists" = "true" ]; then
|
||||
pluralisation="pluralRules"
|
||||
elif [ "$pluralRuleExists" = "true" ]; then
|
||||
pluralisation="pluralRule"
|
||||
else
|
||||
pluralisation="(none)"
|
||||
fi
|
||||
notes=""
|
||||
# Basic heuristic notes for English regional spelling differences
|
||||
if [[ "$code" =~ ^en- ]] && [ "$code" != "en-US" ] && [ "$code" != "en" ]; then
|
||||
notes="Regional spelling"
|
||||
fi
|
||||
if [ "$code" = "en" ]; then
|
||||
notes="Provides full master string set & advanced multi-category rules"
|
||||
fi
|
||||
if [ "$code" = "en-US" ]; then
|
||||
notes="Acts as default American variant"
|
||||
fi
|
||||
TABLE+=$"| $code | $name | $nativeName | $type | $parent | $pluralisation | $notes |"$'\n'
|
||||
done
|
||||
|
||||
# Escape slashes for sed replacement
|
||||
ESCAPED_TABLE=$(printf '%s' "$TABLE" | sed 's/\\/\\\\/g; s/\//\\\//g')
|
||||
|
||||
# Use perl for multi-line safe replacement between markers
|
||||
perl -0777 -i -pe "s/${START_MARKER}.*?${END_MARKER}/${START_MARKER}\n${ESCAPED_TABLE}${END_MARKER}/s" "$DOC_FILE"
|
||||
|
||||
echo "Locale table updated in $DOC_FILE"
|
||||
|
|
@ -3,36 +3,21 @@ name: Build Pre-release Docker Image
|
|||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-preview.[0-9]'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]'
|
||||
- 'v[0-9]+\.[0-9]+\.[0-9]+-preview\.[0-9]'
|
||||
- 'v[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]'
|
||||
|
||||
permissions:
|
||||
packages: write
|
||||
contents: read
|
||||
attestations: write
|
||||
id-token: write
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: 'true'
|
||||
- name: Install dotnet tool
|
||||
run: dotnet tool install -g dotnetCampus.TagToVersion
|
||||
- name: Set tag to version
|
||||
run: dotnet TagToVersion -t ${{ github.ref }}
|
||||
- name: Sign in to Nuget
|
||||
run: dotnet nuget add source --username michael-j-green --password ${{ secrets.NUGETKEY }} --store-password-in-clear-text --name github "https://nuget.pkg.github.com/gaseous-project/index.json"
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: gaseousgames/gaseousserver:${{ github.ref_name}}
|
||||
call-reusable:
|
||||
name: Build Pre-release Docker Image
|
||||
uses: ./.github/workflows/_build-docker-images.yml
|
||||
with:
|
||||
version_tag: ${{ github.ref_name }}
|
||||
tag_to_version: ${{ github.ref }}
|
||||
include_latest: false
|
||||
secrets: inherit
|
||||
45
.github/workflows/BuildDockerOnTag-Release.yml
vendored
45
.github/workflows/BuildDockerOnTag-Release.yml
vendored
|
|
@ -3,35 +3,20 @@ name: Build Release Docker Image
|
|||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+'
|
||||
- 'v[0-9]+\.[0-9]+\.[0-9]+'
|
||||
|
||||
permissions:
|
||||
packages: write
|
||||
contents: read
|
||||
attestations: write
|
||||
id-token: write
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: 'true'
|
||||
- name: Install dotnet tool
|
||||
run: dotnet tool install -g dotnetCampus.TagToVersion
|
||||
- name: Set tag to version
|
||||
run: dotnet TagToVersion -t ${{ github.ref }}
|
||||
- name: Sign in to Nuget
|
||||
run: dotnet nuget add source --username michael-j-green --password ${{ secrets.NUGETKEY }} --store-password-in-clear-text --name github "https://nuget.pkg.github.com/gaseous-project/index.json"
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: gaseousgames/gaseousserver:latest,gaseousgames/gaseousserver:${{ github.ref_name}}
|
||||
call-reusable:
|
||||
name: Build Release Docker Image
|
||||
uses: ./.github/workflows/_build-docker-images.yml
|
||||
with:
|
||||
version_tag: ${{ github.ref_name }}
|
||||
tag_to_version: ${{ github.ref }}
|
||||
include_latest: true
|
||||
secrets: inherit
|
||||
86
.github/workflows/BuildInstallerOnTag-Prerelease.yml
vendored
Normal file
86
.github/workflows/BuildInstallerOnTag-Prerelease.yml
vendored
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
name: Build Prerelease Windows Installer
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+\.[0-9]+\.[0-9]+-preview\.[0-9]'
|
||||
- 'v[0-9]+\.[0-9]+\.[0-9]+-rc\.[0-9]'
|
||||
|
||||
jobs:
|
||||
msi:
|
||||
runs-on: windows-latest
|
||||
defaults:
|
||||
run:
|
||||
shell: pwsh
|
||||
permissions:
|
||||
contents: write
|
||||
packages: read
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: 'true'
|
||||
|
||||
- name: Setup .NET SDK
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: '10.0.x'
|
||||
|
||||
- name: Install tag tool
|
||||
run: dotnet tool install -g dotnetCampus.TagToVersion
|
||||
|
||||
- name: Add .NET tools to PATH
|
||||
run: echo "$($env:USERPROFILE)\.dotnet\tools" >> $env:GITHUB_PATH
|
||||
|
||||
- name: Set tag to version
|
||||
run: dotnet TagToVersion -t ${{ github.ref }}
|
||||
|
||||
- name: Compute sanitized MSI version
|
||||
run: |
|
||||
# strip off extension like "-preview.1" or "-rc.2"
|
||||
$tag = $($env:GITHUB_REF_NAME).split("-")[0]
|
||||
# strip off leading "v"
|
||||
$v = $tag.trimstart("v")
|
||||
# parse into version object to validate
|
||||
[version]$ver = [version]::Parse($v)
|
||||
# set sanitized version for later steps
|
||||
echo "SANITIZED_VERSION=$($ver.ToString())" >> $env:GITHUB_ENV
|
||||
echo "Computed sanitized version: $($ver.ToString())"
|
||||
|
||||
- name: Sign in to NuGet (GitHub Packages)
|
||||
run: dotnet nuget add source --username michael-j-green --password ${{ secrets.NUGETKEY }} --store-password-in-clear-text --name github "https://nuget.pkg.github.com/gaseous-project/index.json"
|
||||
|
||||
- name: Allow PowerShell script execution
|
||||
run: Set-ExecutionPolicy -ExecutionPolicy Bypass -Scope Process -Force
|
||||
|
||||
- name: Publish server for harvesting
|
||||
run: |
|
||||
dotnet publish .\gaseous-server\gaseous-server.csproj -c Release -r win-x64 --self-contained true -p:PublishSingleFile=false -o .\installer\setup\publish\win-x64
|
||||
|
||||
- name: Generate Harvest.wxs
|
||||
run: |
|
||||
New-Item -ItemType Directory -Force -Path .\installer\setup\publish\win-x64 | Out-Null
|
||||
New-Item -ItemType Directory -Force -Path .\installer\setup\obj\harvest | Out-Null
|
||||
pwsh -File .\installer\setup\GenerateHarvest.ps1 -PublishDir .\installer\setup\publish\win-x64 -OutFile .\installer\setup\obj\harvest\Harvest.wxs
|
||||
|
||||
- name: Build MSI (Release)
|
||||
run: |
|
||||
dotnet build .\installer\setup\Setup.wixproj -c Release -p:Version=$env:SANITIZED_VERSION -p:ProductVersion=$env:SANITIZED_VERSION
|
||||
|
||||
- name: Rename MSI with tag
|
||||
run: |
|
||||
$src = "installer/setup/bin/x64/Release/Setup.msi"
|
||||
if (!(Test-Path $src)) { throw "MSI not found at $src" }
|
||||
$dest = "GaseousServer-$($env:GITHUB_REF_NAME)-x64.msi"
|
||||
Copy-Item $src $dest -Force
|
||||
Write-Host "Created $dest"
|
||||
|
||||
- name: Upload MSI to GitHub Release (prerelease)
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: |
|
||||
GaseousServer-${{ github.ref_name }}-x64.msi
|
||||
prerelease: true
|
||||
generate_release_notes: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
85
.github/workflows/BuildInstallerOnTag-Release.yml
vendored
Normal file
85
.github/workflows/BuildInstallerOnTag-Release.yml
vendored
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
name: Build Release Windows Installer
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+\.[0-9]+\.[0-9]+'
|
||||
|
||||
jobs:
|
||||
msi:
|
||||
runs-on: windows-latest
|
||||
defaults:
|
||||
run:
|
||||
shell: pwsh
|
||||
permissions:
|
||||
contents: write
|
||||
packages: read
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: 'true'
|
||||
|
||||
- name: Setup .NET SDK
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: '10.0.x'
|
||||
|
||||
- name: Install tag tool
|
||||
run: dotnet tool install -g dotnetCampus.TagToVersion
|
||||
|
||||
- name: Add .NET tools to PATH
|
||||
run: echo "$($env:USERPROFILE)\.dotnet\tools" >> $env:GITHUB_PATH
|
||||
|
||||
- name: Set tag to version
|
||||
run: dotnet TagToVersion -t ${{ github.ref }}
|
||||
|
||||
- name: Compute sanitized MSI version
|
||||
run: |
|
||||
# strip off extension like "-preview.1" or "-rc.2"
|
||||
$tag = $($env:GITHUB_REF_NAME).split("-")[0]
|
||||
# strip off leading "v"
|
||||
$v = $tag.trimstart("v")
|
||||
# parse into version object to validate
|
||||
[version]$ver = [version]::Parse($v)
|
||||
# set sanitized version for later steps
|
||||
echo "SANITIZED_VERSION=$($ver.ToString())" >> $env:GITHUB_ENV
|
||||
echo "Computed sanitized version: $($ver.ToString())"
|
||||
|
||||
- name: Sign in to NuGet (GitHub Packages)
|
||||
run: dotnet nuget add source --username michael-j-green --password ${{ secrets.NUGETKEY }} --store-password-in-clear-text --name github "https://nuget.pkg.github.com/gaseous-project/index.json"
|
||||
|
||||
- name: Allow PowerShell script execution
|
||||
run: Set-ExecutionPolicy -ExecutionPolicy Bypass -Scope Process -Force
|
||||
|
||||
- name: Publish server for harvesting
|
||||
run: |
|
||||
dotnet publish .\gaseous-server\gaseous-server.csproj -c Release -r win-x64 --self-contained true -p:PublishSingleFile=false -o .\installer\setup\publish\win-x64
|
||||
|
||||
- name: Generate Harvest.wxs
|
||||
run: |
|
||||
New-Item -ItemType Directory -Force -Path .\installer\setup\publish\win-x64 | Out-Null
|
||||
New-Item -ItemType Directory -Force -Path .\installer\setup\obj\harvest | Out-Null
|
||||
pwsh -File .\installer\setup\GenerateHarvest.ps1 -PublishDir .\installer\setup\publish\win-x64 -OutFile .\installer\setup\obj\harvest\Harvest.wxs
|
||||
|
||||
- name: Build MSI (Release)
|
||||
run: |
|
||||
dotnet build .\installer\setup\Setup.wixproj -c Release -p:Version=$env:SANITIZED_VERSION -p:ProductVersion=$env:SANITIZED_VERSION
|
||||
|
||||
- name: Rename MSI with tag
|
||||
run: |
|
||||
$src = "installer/setup/bin/x64/Release/Setup.msi"
|
||||
if (!(Test-Path $src)) { throw "MSI not found at $src" }
|
||||
$dest = "GaseousServer-$($env:GITHUB_REF_NAME)-x64.msi"
|
||||
Copy-Item $src $dest -Force
|
||||
Write-Host "Created $dest"
|
||||
|
||||
- name: Upload MSI to GitHub Release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: |
|
||||
GaseousServer-${{ github.ref_name }}-x64.msi
|
||||
prerelease: false
|
||||
generate_release_notes: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
51
.github/workflows/BuildNightly.yml
vendored
Normal file
51
.github/workflows/BuildNightly.yml
vendored
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
name: Build Nightly Docker Image
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '15 4 * * *'
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
packages: write
|
||||
contents: read
|
||||
attestations: write
|
||||
id-token: write
|
||||
|
||||
jobs:
|
||||
check-changes:
|
||||
name: Check for code changes
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
has_changes: ${{ steps.check.outputs.has_changes }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Check for changes since yesterday
|
||||
id: check
|
||||
run: |
|
||||
# Get commits from the last 24 hours
|
||||
YESTERDAY=$(date -u -d '24 hours ago' '+%Y-%m-%d %H:%M:%S')
|
||||
CHANGES=$(git log --since="$YESTERDAY" --oneline | wc -l)
|
||||
|
||||
if [ "$CHANGES" -gt 0 ]; then
|
||||
echo "has_changes=true" >> $GITHUB_OUTPUT
|
||||
echo "Found $CHANGES commit(s) in the last 24 hours"
|
||||
else
|
||||
echo "has_changes=false" >> $GITHUB_OUTPUT
|
||||
echo "No commits found in the last 24 hours"
|
||||
fi
|
||||
|
||||
call-reusable:
|
||||
name: Build Nightly Docker Image
|
||||
needs: check-changes
|
||||
# Run if manually triggered OR if changes detected
|
||||
if: github.event_name == 'workflow_dispatch' || needs.check-changes.outputs.has_changes == 'true'
|
||||
uses: ./.github/workflows/_build-docker-images.yml
|
||||
with:
|
||||
version_tag: nightly
|
||||
tag_to_version: 0.0.0-nightly
|
||||
include_latest: false
|
||||
secrets: inherit
|
||||
36
.github/workflows/BuildOnTestBranch.yml
vendored
36
.github/workflows/BuildOnTestBranch.yml
vendored
|
|
@ -1,36 +0,0 @@
|
|||
name: Build test branch
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [test]
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: 'true'
|
||||
- name: Install dotnet tool
|
||||
run: dotnet tool install -g dotnetCampus.TagToVersion
|
||||
- name: Set tag to version
|
||||
run: dotnet TagToVersion -t 0.0.1
|
||||
- name: Sign in to Nuget
|
||||
run: dotnet nuget add source --username michael-j-green --password ${{ secrets.NUGETKEY }} --store-password-in-clear-text --name github "https://nuget.pkg.github.com/gaseous-project/index.json"
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: gaseousgames/test:latest
|
||||
29
.github/workflows/CacheCleanup.yml
vendored
Normal file
29
.github/workflows/CacheCleanup.yml
vendored
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
name: Cleanup github runner caches on closed pull requests
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- closed
|
||||
|
||||
jobs:
|
||||
cleanup:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: write
|
||||
steps:
|
||||
- name: Cleanup
|
||||
run: |
|
||||
echo "Fetching list of cache keys"
|
||||
cacheKeysForPR=$(gh cache list --ref $BRANCH --limit 100 --json id --jq '.[].id')
|
||||
|
||||
## Setting this to not fail the workflow while deleting cache keys.
|
||||
set +e
|
||||
echo "Deleting caches..."
|
||||
for cacheKey in $cacheKeysForPR
|
||||
do
|
||||
gh cache delete $cacheKey
|
||||
done
|
||||
echo "Done"
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
GH_REPO: ${{ github.repository }}
|
||||
BRANCH: refs/pull/${{ github.event.pull_request.number }}/merge
|
||||
141
.github/workflows/_build-docker-images.yml
vendored
Normal file
141
.github/workflows/_build-docker-images.yml
vendored
Normal file
|
|
@ -0,0 +1,141 @@
|
|||
name: Reusable - Build Docker Images
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
version_tag:
|
||||
description: Docker tag to use for the version (e.g., nightly or v1.2.3)
|
||||
required: true
|
||||
type: string
|
||||
tag_to_version:
|
||||
description: Value to pass to TagToVersion -t (e.g., 0.0.0-nightly or github.ref)
|
||||
required: true
|
||||
type: string
|
||||
include_latest:
|
||||
description: Also tag and push :latest (and :latest-embeddeddb) variants
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
|
||||
jobs:
|
||||
Staging:
|
||||
name: Prepare repo for Docker build
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: 'true'
|
||||
- name: Install dotnet tool
|
||||
run: dotnet tool install -g dotnetCampus.TagToVersion
|
||||
- name: Set tag to version
|
||||
run: dotnet TagToVersion -t "${{ inputs.tag_to_version }}"
|
||||
- name: Archive modified repo
|
||||
run: |
|
||||
tar \
|
||||
--warning=no-file-changed \
|
||||
--exclude=.git \
|
||||
--exclude=NuGet.Config \
|
||||
--exclude=nuget.config \
|
||||
--exclude='**/bin' \
|
||||
--exclude='**/obj' \
|
||||
-czf "$RUNNER_TEMP/repo.tgz" .
|
||||
- name: Upload modified repo
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: repo-from-staging
|
||||
path: ${{ runner.temp }}/repo.tgz
|
||||
retention-days: 1
|
||||
|
||||
BuildDockerImages:
|
||||
name: Build and push (${{ matrix.variant }})
|
||||
needs: Staging
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
packages: write
|
||||
contents: read
|
||||
attestations: write
|
||||
id-token: write
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- variant: standard
|
||||
dockerfile: ./build/Dockerfile
|
||||
tagsuffix: ""
|
||||
- variant: embeddeddb
|
||||
dockerfile: ./build/Dockerfile-EmbeddedDB
|
||||
tagsuffix: "-embeddeddb"
|
||||
steps:
|
||||
- name: Download modified repo
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: repo-from-staging
|
||||
path: .
|
||||
- name: Extract repo
|
||||
run: tar -xzf repo.tgz && rm repo.tgz
|
||||
- name: Write temporary nuget.config for GitHub Packages
|
||||
shell: bash
|
||||
run: |
|
||||
cat > nuget.config <<'EOF'
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<configuration>
|
||||
<packageSources>
|
||||
<add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
|
||||
<add key="github" value="https://nuget.pkg.github.com/gaseous-project/index.json" />
|
||||
</packageSources>
|
||||
<packageSourceCredentials>
|
||||
<github>
|
||||
<add key="Username" value="michael-j-green" />
|
||||
<add key="ClearTextPassword" value="${{ secrets.NUGETKEY }}" />
|
||||
</github>
|
||||
</packageSourceCredentials>
|
||||
</configuration>
|
||||
EOF
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Login to GitHub Package Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- id: compute-tags
|
||||
name: Compute tags
|
||||
shell: bash
|
||||
run: |
|
||||
tags=$(printf "%s\n%s" \
|
||||
"gaseousgames/gaseousserver:${{ inputs.version_tag }}${{ matrix.tagsuffix }}" \
|
||||
"ghcr.io/gaseous-project/gaseousserver:${{ inputs.version_tag }}${{ matrix.tagsuffix }}")
|
||||
if [[ "${{ inputs.include_latest }}" == 'true' && "${{ matrix.tagsuffix }}" == '' ]]; then
|
||||
tags=$(printf "%s\n%s\n%s" \
|
||||
"$tags" \
|
||||
"gaseousgames/gaseousserver:latest" \
|
||||
"ghcr.io/gaseous-project/gaseousserver:latest")
|
||||
fi
|
||||
{
|
||||
echo "tags<<EOF"
|
||||
echo "$tags"
|
||||
echo "EOF"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
- name: Build and push image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: ${{ matrix.dockerfile }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
pull: true
|
||||
provenance: true
|
||||
sbom: true
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.compute-tags.outputs.tags }}
|
||||
77
.github/workflows/check-migration-scripts.yml
vendored
Normal file
77
.github/workflows/check-migration-scripts.yml
vendored
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
name: Migration Script Guard
|
||||
|
||||
# Protects historical migration SQL files from modification.
|
||||
# Adding new files is always allowed; editing or deleting existing ones is denied.
|
||||
# Any PR that modifies a file that already existed on the base branch will fail.
|
||||
#
|
||||
# POLICY: Migration scripts are immutable once merged.
|
||||
# If a bug needs correcting, add a new migration version instead.
|
||||
|
||||
"on":
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'gaseous-lib/Support/Database/MySQL/gaseous-*.sql'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
guard-migration-scripts:
|
||||
name: Check migration scripts are immutable
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Detect modified or deleted historical migration scripts
|
||||
id: check
|
||||
run: |
|
||||
# Get the list of files changed in this PR relative to the merge base
|
||||
BASE_SHA=$(git merge-base HEAD origin/${{ github.base_ref }})
|
||||
|
||||
# Files that existed on the base branch and have been changed or removed
|
||||
VIOLATIONS=$(git diff --name-status "$BASE_SHA" HEAD \
|
||||
-- 'gaseous-lib/Support/Database/MySQL/gaseous-*.sql' \
|
||||
| grep -E '^[MD]' \
|
||||
| awk '{print $2}' \
|
||||
|| true)
|
||||
|
||||
if [ -n "$VIOLATIONS" ]; then
|
||||
echo "::error::The following historical migration scripts have been modified or deleted."
|
||||
echo "::error::Migration scripts are immutable once merged. Add a new migration version instead."
|
||||
echo ""
|
||||
echo "$VIOLATIONS" | while read -r f; do
|
||||
echo " VIOLATION: $f"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "No historical migration scripts were modified. Check passed."
|
||||
|
||||
- name: Validate manifest covers latest migration version
|
||||
run: |
|
||||
# Find the highest numeric migration version present in the SQL files
|
||||
LATEST=$(ls gaseous-lib/Support/Database/MySQL/gaseous-[0-9]*.sql 2>/dev/null \
|
||||
| grep -oE '[0-9]+' | sort -n | tail -1 || echo "0")
|
||||
|
||||
echo "Latest migration version in SQL files: $LATEST"
|
||||
|
||||
# Extract the MaxManifestVersion constant from the C# manifest file.
|
||||
# We do this by scanning for all SchemaVersion assignments in the manifest
|
||||
# and picking the maximum, rather than requiring a separate constant.
|
||||
MANIFEST_MAX=$(grep -oP '(?<=SchemaVersion = )\d+' \
|
||||
gaseous-lib/Classes/Database/DatabaseMigrationManifest.cs \
|
||||
| sort -n | tail -1 || echo "0")
|
||||
|
||||
echo "Highest schema version in manifest: $MANIFEST_MAX"
|
||||
|
||||
if [ "$MANIFEST_MAX" -lt "$LATEST" ]; then
|
||||
echo "::error::Migration version $LATEST has no validation entry in DatabaseMigrationManifest.cs."
|
||||
echo "::error::Add at least one ValidationEntry with SchemaVersion = $LATEST before merging."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Manifest coverage check passed."
|
||||
85
.github/workflows/codeql.yml
vendored
85
.github/workflows/codeql.yml
vendored
|
|
@ -1,85 +0,0 @@
|
|||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "main", "branch-v*.*.*" ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ "main" ]
|
||||
schedule:
|
||||
- cron: '21 11 * * 2'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
# Runner size impacts CodeQL analysis time. To learn more, please see:
|
||||
# - https://gh.io/recommended-hardware-resources-for-running-codeql
|
||||
# - https://gh.io/supported-runners-and-hardware-resources
|
||||
# - https://gh.io/using-larger-runners
|
||||
# Consider using larger runners for possible analysis time improvements.
|
||||
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
|
||||
timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'csharp', 'javascript' ]
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby', 'swift' ]
|
||||
# Use only 'java' to analyze code written in Java, Kotlin or both
|
||||
# Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
|
||||
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Sign in to Nuget
|
||||
run: dotnet nuget add source --username michael-j-green --password ${{ secrets.NUGETKEY }} --store-password-in-clear-text --name github "https://nuget.pkg.github.com/gaseous-project/index.json"
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
|
||||
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||
# queries: security-extended,security-and-quality
|
||||
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
|
||||
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
||||
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
||||
|
||||
# - run: |
|
||||
# echo "Run, Build Application using script"
|
||||
# ./location_of_script_within_repo/buildscript.sh
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
19
.github/workflows/dotnet.yml
vendored
19
.github/workflows/dotnet.yml
vendored
|
|
@ -7,18 +7,27 @@ on:
|
|||
branches: [main]
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
packages: write
|
||||
contents: read
|
||||
attestations: write
|
||||
id-token: write
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build Dotnet
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v1
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 8.0.x
|
||||
dotnet-version: 10.0.x
|
||||
- name: Sign in to Nuget
|
||||
run: dotnet nuget add source --username michael-j-green --password ${{ secrets.NUGETKEY }} --store-password-in-clear-text --name github "https://nuget.pkg.github.com/gaseous-project/index.json"
|
||||
- name: Restore dependencies
|
||||
run: dotnet restore
|
||||
- name: Build
|
||||
run: dotnet build --no-restore
|
||||
- name: Build server
|
||||
run: dotnet build ./gaseous-server/gaseous-server.csproj --no-restore -c Release
|
||||
- name: Build CLI
|
||||
run: dotnet build ./gaseous-cli/gaseous-cli.csproj --no-restore -c Release
|
||||
139
.github/workflows/language-coverage.yml
vendored
Normal file
139
.github/workflows/language-coverage.yml
vendored
Normal file
|
|
@ -0,0 +1,139 @@
|
|||
name: Check Language Coverage
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- 'gaseous-lib/Support/Localisation/*.json'
|
||||
- '.github/workflows/language-coverage.yml'
|
||||
|
||||
jobs:
|
||||
language-coverage:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Check language coverage
|
||||
id: check-coverage
|
||||
run: |
|
||||
node <<'EOF'
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const locDir = path.join(process.env.GITHUB_WORKSPACE, 'gaseous-lib/Support/Localisation');
|
||||
const enPath = path.join(locDir, 'en.json');
|
||||
const en = JSON.parse(fs.readFileSync(enPath, 'utf8'));
|
||||
const enStrings = Object.keys(en.strings || {});
|
||||
const enServerStrings = Object.keys(en.serverstrings || {});
|
||||
|
||||
// Helper: is base language file (e.g. fr.json, de.json)
|
||||
function isBaseLang(filename) {
|
||||
return /^[a-z]{2}\.json$/.test(filename) && filename !== 'en.json';
|
||||
}
|
||||
|
||||
const missing = [];
|
||||
for (const file of fs.readdirSync(locDir)) {
|
||||
if (!isBaseLang(file)) continue;
|
||||
const filePath = path.join(locDir, file);
|
||||
let data;
|
||||
try {
|
||||
data = JSON.parse(fs.readFileSync(filePath, 'utf8'));
|
||||
} catch (e) {
|
||||
missing.push({ file, error: 'Invalid JSON' });
|
||||
continue;
|
||||
}
|
||||
const strings = Object.keys(data.strings || {});
|
||||
const serverstrings = Object.keys(data.serverstrings || {});
|
||||
const missingStrings = enStrings.filter(k => !strings.includes(k));
|
||||
const missingServerStrings = enServerStrings.filter(k => !serverstrings.includes(k));
|
||||
if (missingStrings.length || missingServerStrings.length) {
|
||||
missing.push({ file, missingStrings, missingServerStrings });
|
||||
}
|
||||
}
|
||||
|
||||
// Save result for later steps
|
||||
fs.writeFileSync('missing-language-keys.json', JSON.stringify(missing, null, 2));
|
||||
EOF
|
||||
|
||||
- name: Comment on PR with language coverage status
|
||||
if: always()
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
let missing = [];
|
||||
try {
|
||||
missing = JSON.parse(fs.readFileSync('missing-language-keys.json', 'utf8'));
|
||||
} catch {}
|
||||
|
||||
// Comment identifier for finding existing bot comments
|
||||
const commentIdentifier = '<!-- language-coverage-bot -->';
|
||||
|
||||
let body;
|
||||
if (missing.length === 0) {
|
||||
body = `${commentIdentifier}\n### :white_check_mark: Language Coverage\n\nAll base language files have complete translations! :tada:`;
|
||||
} else {
|
||||
body = `${commentIdentifier}\n### :warning: Missing Language Keys\n`;
|
||||
for (const entry of missing) {
|
||||
body += `\n**${entry.file}**`;
|
||||
if (entry.error) {
|
||||
body += `\n- Error: ${entry.error}`;
|
||||
continue;
|
||||
}
|
||||
if (entry.missingStrings.length) {
|
||||
body += `\n- Missing strings: ${entry.missingStrings.join(', ')}`;
|
||||
}
|
||||
if (entry.missingServerStrings.length) {
|
||||
body += `\n- Missing serverstrings: ${entry.missingServerStrings.join(', ')}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Find existing bot comment
|
||||
const comments = await github.rest.issues.listComments({
|
||||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
});
|
||||
|
||||
const botComment = comments.data.find(comment =>
|
||||
comment.body.includes(commentIdentifier)
|
||||
);
|
||||
|
||||
if (botComment) {
|
||||
// Update existing comment
|
||||
await github.rest.issues.updateComment({
|
||||
comment_id: botComment.id,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
body
|
||||
});
|
||||
console.log('Updated existing language coverage comment');
|
||||
} else {
|
||||
// Create new comment
|
||||
await github.rest.issues.createComment({
|
||||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
body
|
||||
});
|
||||
console.log('Created new language coverage comment');
|
||||
}
|
||||
|
||||
- name: Fail if missing keys
|
||||
if: always()
|
||||
run: |
|
||||
missing=$(cat missing-language-keys.json)
|
||||
if [ "$missing" != "[]" ]; then
|
||||
echo "Some base language files are missing keys. See PR comments."
|
||||
exit 1
|
||||
fi
|
||||
94
.github/workflows/update-localisation-doc.yml
vendored
Normal file
94
.github/workflows/update-localisation-doc.yml
vendored
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
name: Update Localisation Documentation
|
||||
|
||||
on:
|
||||
push:
|
||||
branches-ignore:
|
||||
- main
|
||||
paths:
|
||||
- 'gaseous-lib/Support/Localisation/*.json'
|
||||
- 'docs/Localisation.md'
|
||||
- 'docs/Localisation-Glossary.md'
|
||||
- '.github/scripts/generate_locale_table.sh'
|
||||
workflow_dispatch: {}
|
||||
|
||||
jobs:
|
||||
regenerate-locale-table:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: 'recursive'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Configure git for submodules
|
||||
run: |
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git submodule foreach --recursive 'git config user.name "github-actions[bot]"'
|
||||
git submodule foreach --recursive 'git config user.email "41898282+github-actions[bot]@users.noreply.github.com"'
|
||||
|
||||
- name: Set up jq
|
||||
run: sudo apt-get update && sudo apt-get install -y jq
|
||||
|
||||
- name: Generate locale table
|
||||
run: |
|
||||
chmod +x .github/scripts/generate_locale_table.sh
|
||||
.github/scripts/generate_locale_table.sh
|
||||
chmod +x .github/scripts/generate_glossary_table.sh
|
||||
.github/scripts/generate_glossary_table.sh
|
||||
|
||||
- name: Detect changes
|
||||
id: detect
|
||||
run: |
|
||||
changed=false
|
||||
submodule_changed=false
|
||||
|
||||
# Check for changes in docs submodule
|
||||
if ! git diff --quiet docs/Localisation.md; then
|
||||
changed=true
|
||||
fi
|
||||
if ! git diff --quiet docs/Localisation-Glossary.md; then
|
||||
changed=true
|
||||
fi
|
||||
|
||||
# Check if docs is a submodule
|
||||
if [ -f .gitmodules ] && grep -q "path = docs" .gitmodules; then
|
||||
submodule_changed=true
|
||||
fi
|
||||
|
||||
echo "changed=${changed}" >> $GITHUB_OUTPUT
|
||||
echo "submodule_changed=${submodule_changed}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Commit submodule changes
|
||||
if: ${{ steps.detect.outputs.changed == 'true' && steps.detect.outputs.submodule_changed == 'true' }}
|
||||
run: |
|
||||
cd docs
|
||||
git add Localisation.md Localisation-Glossary.md
|
||||
if ! git diff --cached --quiet; then
|
||||
git commit -m "docs: auto-update localisation tables (language + glossary)"
|
||||
git push origin HEAD
|
||||
fi
|
||||
|
||||
- name: Update parent repository submodule reference
|
||||
if: ${{ steps.detect.outputs.changed == 'true' && steps.detect.outputs.submodule_changed == 'true' }}
|
||||
run: |
|
||||
git add docs
|
||||
if ! git diff --cached --quiet; then
|
||||
git commit -m "docs: update submodule reference for localisation table updates"
|
||||
git push
|
||||
fi
|
||||
|
||||
- name: Commit direct changes (non-submodule)
|
||||
if: ${{ steps.detect.outputs.changed == 'true' && steps.detect.outputs.submodule_changed == 'false' }}
|
||||
run: |
|
||||
git add docs/Localisation.md docs/Localisation-Glossary.md
|
||||
git commit -m "docs: auto-update localisation tables (language + glossary)"
|
||||
git push
|
||||
|
||||
- name: Summary
|
||||
run: |
|
||||
echo "Locale/glossary regeneration complete. Changed: ${{ steps.detect.outputs.changed }}" >> $GITHUB_STEP_SUMMARY
|
||||
3
.gitmodules
vendored
3
.gitmodules
vendored
|
|
@ -0,0 +1,3 @@
|
|||
[submodule "docs"]
|
||||
path = docs
|
||||
url = https://github.com/gaseous-project/gaseous-server.wiki.git
|
||||
81
.vscode/launch.json
vendored
81
.vscode/launch.json
vendored
|
|
@ -2,35 +2,92 @@
|
|||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
// Use IntelliSense to find out which attributes exist for C# debugging
|
||||
// Use hover for the description of the existing attributes
|
||||
// For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md
|
||||
"name": ".NET Core Launch (web)",
|
||||
"name": "Server (Web + Browser)",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"preLaunchTask": "build",
|
||||
// If you have changed target frameworks, make sure to update the program path.
|
||||
"program": "${workspaceFolder}/gaseous-server/bin/Debug/net8.0/gaseous-server.dll",
|
||||
"preLaunchTask": "full-rebuild",
|
||||
"program": "${workspaceFolder}/gaseous-server/bin/Debug/net10.0/gaseous-server.dll",
|
||||
"args": [],
|
||||
"cwd": "${workspaceFolder}/gaseous-server",
|
||||
"stopAtEntry": false,
|
||||
// Enable launching a web browser when ASP.NET Core starts. For more information: https://aka.ms/VSCode-CS-LaunchJson-WebBrowser
|
||||
"console": "integratedTerminal",
|
||||
"serverReadyAction": {
|
||||
"action": "openExternally",
|
||||
"pattern": "\\bNow listening on:\\s+(http?://\\S+)"
|
||||
},
|
||||
"env": {
|
||||
"ASPNETCORE_ENVIRONMENT": "Development",
|
||||
"ASPNETCORE_DETAILEDERRORS": "true"
|
||||
},
|
||||
"justMyCode": true,
|
||||
"requireExactSource": false,
|
||||
"enableStepFiltering": false
|
||||
},
|
||||
{
|
||||
"name": "Server (Debug Exceptions)",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"preLaunchTask": "full-rebuild",
|
||||
"program": "${workspaceFolder}/gaseous-server/bin/Debug/net10.0/gaseous-server.dll",
|
||||
"cwd": "${workspaceFolder}/gaseous-server",
|
||||
"stopAtEntry": false,
|
||||
"console": "integratedTerminal",
|
||||
"env": {
|
||||
"ASPNETCORE_ENVIRONMENT": "Development",
|
||||
"ASPNETCORE_DETAILEDERRORS": "true"
|
||||
},
|
||||
"justMyCode": false,
|
||||
"requireExactSource": false,
|
||||
"symbolOptions": {
|
||||
"searchMicrosoftSymbolServer": true,
|
||||
"searchNuGetOrgSymbolServer": true
|
||||
},
|
||||
"logging": {
|
||||
"moduleLoad": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Server (Stop at Entry)",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"preLaunchTask": "full-rebuild",
|
||||
"program": "${workspaceFolder}/gaseous-server/bin/Debug/net10.0/gaseous-server.dll",
|
||||
"cwd": "${workspaceFolder}/gaseous-server",
|
||||
"stopAtEntry": true,
|
||||
"console": "integratedTerminal",
|
||||
"env": {
|
||||
"ASPNETCORE_ENVIRONMENT": "Development"
|
||||
},
|
||||
"sourceFileMap": {
|
||||
"/Views": "${workspaceFolder}/Views"
|
||||
},
|
||||
"enableStepFiltering": false
|
||||
"justMyCode": true
|
||||
},
|
||||
{
|
||||
"name": "Attach by PID",
|
||||
"type": "coreclr",
|
||||
"request": "attach",
|
||||
"processId": "${command:pickProcess}"
|
||||
},
|
||||
{
|
||||
"name": ".NET Core Attach",
|
||||
"type": "coreclr",
|
||||
"request": "attach"
|
||||
},
|
||||
{
|
||||
"name": "Process Host",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"preLaunchTask": "full-rebuild",
|
||||
"program": "${workspaceFolder}/gaseous-processhost/bin/Debug/net10.0/gaseous-processhost.dll",
|
||||
"args": [
|
||||
"--service",
|
||||
"MetadataRefresh",
|
||||
"--reportingserver",
|
||||
"https://localhost:5197",
|
||||
"--correlationid",
|
||||
"00000000-0000-0000-0000-000000000000",
|
||||
"--force"
|
||||
],
|
||||
"cwd": "${workspaceFolder}/gaseous-processhost",
|
||||
"stopAtEntry": false
|
||||
}
|
||||
]
|
||||
}
|
||||
5
.vscode/settings.json
vendored
Normal file
5
.vscode/settings.json
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
{
|
||||
"editor.formatOnPaste": true,
|
||||
"editor.formatOnSave": true,
|
||||
"editor.formatOnType": true
|
||||
}
|
||||
253
.vscode/tasks.json
vendored
253
.vscode/tasks.json
vendored
|
|
@ -1,41 +1,216 @@
|
|||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "build",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"build",
|
||||
"${workspaceFolder}/gaseous-server/gaseous-server.csproj",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "publish",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"publish",
|
||||
"${workspaceFolder}/gaseous-server/gaseous-server.csproj",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "watch",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"watch",
|
||||
"run",
|
||||
"--project",
|
||||
"${workspaceFolder}/gaseous-server/gaseous-server.csproj"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
}
|
||||
]
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "prepare-web",
|
||||
"dependsOn": [
|
||||
"kill-gaseous-server",
|
||||
"build"
|
||||
],
|
||||
"dependsOrder": "sequence",
|
||||
"problemMatcher": []
|
||||
},
|
||||
// Added explicit clean and full-rebuild sequence for debugging
|
||||
{
|
||||
"label": "clean",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"clean",
|
||||
"${workspaceFolder}/Gaseous.sln",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "full-rebuild",
|
||||
"dependsOn": [
|
||||
"kill-gaseous-server",
|
||||
"clean",
|
||||
"build"
|
||||
],
|
||||
"dependsOrder": "sequence",
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "kill-gaseous-server",
|
||||
"type": "shell",
|
||||
"command": "bash",
|
||||
"args": [
|
||||
"-lc",
|
||||
"chmod +x ${workspaceFolder}/build/scripts/kill-gaseous-server.sh && ${workspaceFolder}/build/scripts/kill-gaseous-server.sh"
|
||||
],
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "build",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"build",
|
||||
"${workspaceFolder}/Gaseous.sln",
|
||||
"/t:Rebuild",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "publish",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"publish",
|
||||
"${workspaceFolder}/gaseous-server/gaseous-server.csproj",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "watch",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"watch",
|
||||
"run",
|
||||
"--project",
|
||||
"${workspaceFolder}/gaseous-server/gaseous-server.csproj"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "build (dotnet)",
|
||||
"type": "shell",
|
||||
"command": "dotnet",
|
||||
"args": [
|
||||
"build",
|
||||
"c:\\Users\\michael.green\\source\\repos\\gaseous-server\\Gaseous.sln",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "rebuild (dotnet)",
|
||||
"type": "shell",
|
||||
"command": "dotnet",
|
||||
"args": [
|
||||
"build",
|
||||
"c:\\Users\\michael.green\\source\\repos\\gaseous-server\\Gaseous.sln",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "build solution after status strip change",
|
||||
"type": "shell",
|
||||
"command": "dotnet",
|
||||
"args": [
|
||||
"build",
|
||||
"c:\\Users\\michael.green\\source\\repos\\gaseous-server\\Gaseous.sln",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "build solution with dual status panels",
|
||||
"type": "shell",
|
||||
"command": "dotnet",
|
||||
"args": [
|
||||
"build",
|
||||
"c:\\Users\\michael.green\\source\\repos\\gaseous-server\\Gaseous.sln",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "build after startup initializer",
|
||||
"type": "shell",
|
||||
"command": "dotnet",
|
||||
"args": [
|
||||
"build",
|
||||
"c:\\Users\\michael.green\\source\\repos\\gaseous-server\\Gaseous.sln",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "rebuild after cleaning ProgramHost",
|
||||
"type": "shell",
|
||||
"command": "dotnet",
|
||||
"args": [
|
||||
"build",
|
||||
"c:\\Users\\michael.green\\source\\repos\\gaseous-server\\Gaseous.sln",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "rebuild confirm after removing ProgramHost",
|
||||
"type": "shell",
|
||||
"command": "dotnet",
|
||||
"args": [
|
||||
"build",
|
||||
"c:\\Users\\michael.green\\source\\repos\\gaseous-server\\Gaseous.sln",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "dotnet publish gaseous-server",
|
||||
"type": "shell",
|
||||
"command": "dotnet",
|
||||
"args": [
|
||||
"publish",
|
||||
"c:\\Users\\michael.green\\source\\repos\\gaseous-server\\gaseous-server\\gaseous-server.csproj",
|
||||
"-c",
|
||||
"Release"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "Build MSI",
|
||||
"type": "shell",
|
||||
"command": "dotnet",
|
||||
"args": [
|
||||
"build",
|
||||
"c:\\Users\\michael.green\\source\\repos\\gaseous-server\\installer\\setup\\Setup.wixproj",
|
||||
"-c",
|
||||
"Release"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "Build MSI",
|
||||
"type": "shell",
|
||||
"command": "dotnet",
|
||||
"args": [
|
||||
"build",
|
||||
"c:\\Users\\michael.green\\source\\repos\\gaseous-server\\installer\\setup\\Setup.wixproj",
|
||||
"-c",
|
||||
"Release"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "Build MSI",
|
||||
"type": "shell",
|
||||
"command": "dotnet",
|
||||
"args": [
|
||||
"build",
|
||||
"c:\\Users\\michael.green\\source\\repos\\gaseous-server\\installer\\setup\\Setup.wixproj",
|
||||
"-c",
|
||||
"Release"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "Build MSI",
|
||||
"type": "shell",
|
||||
"command": "dotnet",
|
||||
"args": [
|
||||
"build",
|
||||
"c:\\Users\\michael.green\\source\\repos\\gaseous-server\\installer\\setup\\Setup.wixproj",
|
||||
"-c",
|
||||
"Release"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
28
Dockerfile
28
Dockerfile
|
|
@ -1,28 +0,0 @@
|
|||
FROM --platform=$BUILDPLATFORM mcr.microsoft.com/dotnet/sdk:8.0 AS build-env
|
||||
ARG TARGETARCH
|
||||
ARG BUILDPLATFORM
|
||||
WORKDIR /App
|
||||
EXPOSE 80
|
||||
|
||||
RUN echo "Target: $TARGETARCH"
|
||||
RUN echo "Build: $BUILDPLATFORM"
|
||||
|
||||
# Copy everything
|
||||
COPY . ./
|
||||
# Restore as distinct layers
|
||||
RUN dotnet restore "gaseous-server/gaseous-server.csproj" -a $TARGETARCH
|
||||
# Build and publish a release
|
||||
RUN dotnet publish "gaseous-server/gaseous-server.csproj" --use-current-runtime --self-contained true -c Release -o out -a $TARGETARCH
|
||||
|
||||
# download and unzip EmulatorJS from CDN
|
||||
RUN apt-get update && apt-get install -y p7zip-full
|
||||
RUN mkdir -p out/wwwroot/emulators/EmulatorJS
|
||||
RUN wget https://cdn.emulatorjs.org/releases/4.0.12.7z
|
||||
RUN 7z x -y -oout/wwwroot/emulators/EmulatorJS 4.0.12.7z
|
||||
|
||||
# Build runtime image
|
||||
FROM mcr.microsoft.com/dotnet/aspnet:8.0
|
||||
ENV INDOCKER=1
|
||||
WORKDIR /App
|
||||
COPY --from=build-env /App/out .
|
||||
ENTRYPOINT ["dotnet", "gaseous-server.dll"]
|
||||
82
Gaseous.sln
82
Gaseous.sln
|
|
@ -21,16 +21,98 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "screenshots", "screenshots"
|
|||
screenshots\Game.png = screenshots\Game.png
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "gaseous-cli", "gaseous-cli\gaseous-cli.csproj", "{419CC4E4-8932-4E4A-B027-5521AA0CBA85}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "gaseous-configurator", "gaseous-configurator\gaseous-configurator.csproj", "{C6A3A0D2-76AA-4C14-9A7B-7A8E6D7C9A11}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "gaseous-server.Tests", "gaseous-server.Tests\gaseous-server.Tests.csproj", "{A476D629-DC6F-4C78-9084-8F910429AFB3}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "gaseous-processhost", "gaseous-processhost\gaseous-processhost.csproj", "{CFCDA316-2CBA-497D-87AC-EFB86E2705AE}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "gaseous-lib", "gaseous-lib\gaseous-lib.csproj", "{E6B7DDA6-BD74-436D-BAC5-8E42D1AC6E2F}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
Debug|x64 = Debug|x64
|
||||
Debug|x86 = Debug|x86
|
||||
Release|Any CPU = Release|Any CPU
|
||||
Release|x64 = Release|x64
|
||||
Release|x86 = Release|x86
|
||||
EndGlobalSection
|
||||
GlobalSection(ProjectConfigurationPlatforms) = postSolution
|
||||
{A01D2EFF-C82E-473B-84D7-7C25E736F5D2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{A01D2EFF-C82E-473B-84D7-7C25E736F5D2}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{A01D2EFF-C82E-473B-84D7-7C25E736F5D2}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{A01D2EFF-C82E-473B-84D7-7C25E736F5D2}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{A01D2EFF-C82E-473B-84D7-7C25E736F5D2}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{A01D2EFF-C82E-473B-84D7-7C25E736F5D2}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{A01D2EFF-C82E-473B-84D7-7C25E736F5D2}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{A01D2EFF-C82E-473B-84D7-7C25E736F5D2}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{A01D2EFF-C82E-473B-84D7-7C25E736F5D2}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{A01D2EFF-C82E-473B-84D7-7C25E736F5D2}.Release|x64.Build.0 = Release|Any CPU
|
||||
{A01D2EFF-C82E-473B-84D7-7C25E736F5D2}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{A01D2EFF-C82E-473B-84D7-7C25E736F5D2}.Release|x86.Build.0 = Release|Any CPU
|
||||
{419CC4E4-8932-4E4A-B027-5521AA0CBA85}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{419CC4E4-8932-4E4A-B027-5521AA0CBA85}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{419CC4E4-8932-4E4A-B027-5521AA0CBA85}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{419CC4E4-8932-4E4A-B027-5521AA0CBA85}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{419CC4E4-8932-4E4A-B027-5521AA0CBA85}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{419CC4E4-8932-4E4A-B027-5521AA0CBA85}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{419CC4E4-8932-4E4A-B027-5521AA0CBA85}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{419CC4E4-8932-4E4A-B027-5521AA0CBA85}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{419CC4E4-8932-4E4A-B027-5521AA0CBA85}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{419CC4E4-8932-4E4A-B027-5521AA0CBA85}.Release|x64.Build.0 = Release|Any CPU
|
||||
{419CC4E4-8932-4E4A-B027-5521AA0CBA85}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{419CC4E4-8932-4E4A-B027-5521AA0CBA85}.Release|x86.Build.0 = Release|Any CPU
|
||||
{C6A3A0D2-76AA-4C14-9A7B-7A8E6D7C9A11}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{C6A3A0D2-76AA-4C14-9A7B-7A8E6D7C9A11}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{C6A3A0D2-76AA-4C14-9A7B-7A8E6D7C9A11}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{C6A3A0D2-76AA-4C14-9A7B-7A8E6D7C9A11}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{C6A3A0D2-76AA-4C14-9A7B-7A8E6D7C9A11}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{C6A3A0D2-76AA-4C14-9A7B-7A8E6D7C9A11}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{C6A3A0D2-76AA-4C14-9A7B-7A8E6D7C9A11}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{C6A3A0D2-76AA-4C14-9A7B-7A8E6D7C9A11}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{C6A3A0D2-76AA-4C14-9A7B-7A8E6D7C9A11}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{C6A3A0D2-76AA-4C14-9A7B-7A8E6D7C9A11}.Release|x64.Build.0 = Release|Any CPU
|
||||
{C6A3A0D2-76AA-4C14-9A7B-7A8E6D7C9A11}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{C6A3A0D2-76AA-4C14-9A7B-7A8E6D7C9A11}.Release|x86.Build.0 = Release|Any CPU
|
||||
{A476D629-DC6F-4C78-9084-8F910429AFB3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{A476D629-DC6F-4C78-9084-8F910429AFB3}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{A476D629-DC6F-4C78-9084-8F910429AFB3}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{A476D629-DC6F-4C78-9084-8F910429AFB3}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{A476D629-DC6F-4C78-9084-8F910429AFB3}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{A476D629-DC6F-4C78-9084-8F910429AFB3}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{A476D629-DC6F-4C78-9084-8F910429AFB3}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{A476D629-DC6F-4C78-9084-8F910429AFB3}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{A476D629-DC6F-4C78-9084-8F910429AFB3}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{A476D629-DC6F-4C78-9084-8F910429AFB3}.Release|x64.Build.0 = Release|Any CPU
|
||||
{A476D629-DC6F-4C78-9084-8F910429AFB3}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{A476D629-DC6F-4C78-9084-8F910429AFB3}.Release|x86.Build.0 = Release|Any CPU
|
||||
{CFCDA316-2CBA-497D-87AC-EFB86E2705AE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{CFCDA316-2CBA-497D-87AC-EFB86E2705AE}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{CFCDA316-2CBA-497D-87AC-EFB86E2705AE}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{CFCDA316-2CBA-497D-87AC-EFB86E2705AE}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{CFCDA316-2CBA-497D-87AC-EFB86E2705AE}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{CFCDA316-2CBA-497D-87AC-EFB86E2705AE}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{CFCDA316-2CBA-497D-87AC-EFB86E2705AE}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{CFCDA316-2CBA-497D-87AC-EFB86E2705AE}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{CFCDA316-2CBA-497D-87AC-EFB86E2705AE}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{CFCDA316-2CBA-497D-87AC-EFB86E2705AE}.Release|x64.Build.0 = Release|Any CPU
|
||||
{CFCDA316-2CBA-497D-87AC-EFB86E2705AE}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{CFCDA316-2CBA-497D-87AC-EFB86E2705AE}.Release|x86.Build.0 = Release|Any CPU
|
||||
{E6B7DDA6-BD74-436D-BAC5-8E42D1AC6E2F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{E6B7DDA6-BD74-436D-BAC5-8E42D1AC6E2F}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{E6B7DDA6-BD74-436D-BAC5-8E42D1AC6E2F}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{E6B7DDA6-BD74-436D-BAC5-8E42D1AC6E2F}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{E6B7DDA6-BD74-436D-BAC5-8E42D1AC6E2F}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{E6B7DDA6-BD74-436D-BAC5-8E42D1AC6E2F}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{E6B7DDA6-BD74-436D-BAC5-8E42D1AC6E2F}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{E6B7DDA6-BD74-436D-BAC5-8E42D1AC6E2F}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{E6B7DDA6-BD74-436D-BAC5-8E42D1AC6E2F}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{E6B7DDA6-BD74-436D-BAC5-8E42D1AC6E2F}.Release|x64.Build.0 = Release|Any CPU
|
||||
{E6B7DDA6-BD74-436D-BAC5-8E42D1AC6E2F}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{E6B7DDA6-BD74-436D-BAC5-8E42D1AC6E2F}.Release|x86.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
|
|
|
|||
67
README.MD
67
README.MD
|
|
@ -1,53 +1,48 @@
|
|||
[](https://github.com/gaseous-project/gaseous-server/actions/workflows/dotnet.yml) [](https://github.com/gaseous-project/gaseous-server/actions/workflows/codeql.yml) [](https://github.com/gaseous-project/gaseous-server/actions/workflows/BuildDockerOnTag-Release.yml)
|
||||
# Gaseous Server
|
||||
[](https://github.com/gaseous-project/gaseous-server/actions/workflows/dotnet.yml) [](https://github.com/gaseous-project/gaseous-server/actions/workflows/BuildNightly.yml) [](https://github.com/gaseous-project/gaseous-server/actions/workflows/BuildDockerOnTag-Release.yml)
|
||||
# <img src="./logo.png" height="28" style="float: right;" /> Gaseous Server
|
||||
|
||||
This is the server for the Gaseous system. It offers ROM and title management, as well as some basic in browser emulation of those ROMs.
|
||||
|
||||
## Warning
|
||||
> Version 1.7.0 and later contain user authentication, and can be exposed to the internet. However, it is recommended to not expose the server to the internet if you're not actively using it remotely, or if you have alternative means to access it remotely like a VPN.
|
||||
|
||||
Versions 1.6.1 and earlier are not suitable for being exposed to the internet, as:
|
||||
1. there is no authentication support, meaning anyone could trash your library
|
||||
2. the server has not been hardened for exposure to the internet - so there maybe unknown vulnerabilities
|
||||
|
||||
If you expose one of these earlier versions of the server to the internet, **you do so at your own risk**.
|
||||
|
||||
Version 1.7.0 and later contain user authentication, and can be exposed to the internet. However, it is recommended to no expose the server to the internet if you're not actively using it remotely, or if you have alternative means to access it remotely like a VPN.
|
||||
|
||||
While we do our best to stay on top of server security, if you expose the server to the internet **you do so at your own risk**.
|
||||
> While we do our best to stay on top of server security, if you expose the server to the internet **you do so at your own risk**.
|
||||
|
||||
## Screenshots
|
||||

|
||||

|
||||

|
||||
|
||||

|
||||

|
||||

|
||||
|
||||
## Requirements
|
||||
* MariaDB 11.1.2 (preferred) or MySQL Server 8+
|
||||
* MariaDB 11.1.2+ (preferred) or MySQL Server 8+
|
||||
* These are the database versions Gaseous has been tested and developed against. Your mileage may vary with earlier versions.
|
||||
* MariaDB is the preferred database server, while MySQL will continue to be supported for existing users (they should be interchangable).
|
||||
* Note that due to the earlier database schema using MySQL specific features, moving to MariaDB from MySQL will require rebuilding your database from scratch. The "Library Scan" background task can be used to re-import all titles.
|
||||
* Internet Game Database API Key. See: https://api-docs.igdb.com/#account-creation
|
||||
|
||||
If using the provided docker-compose.yml, MariaDB will be installed for you.
|
||||
|
||||
## Friends of Gaseous
|
||||
* [EmulatorJS](https://github.com/EmulatorJS/EmulatorJS): A fantastic (and fast) Javascript based implementation of RetroArch, supporting a wide variety of platforms. Discord: https://discord.gg/6akryGkETU
|
||||
* [RomM](https://github.com/zurdi15/romm): Another self hosted ROM manager. Discord: https://discord.gg/P5HtHnhUDH
|
||||
|
||||
## Third Party Projects
|
||||
The following projects are used by Gaseous
|
||||
* [ASP.NET](https://dotnet.microsoft.com/en-us/apps/aspnet)
|
||||
* [Newtonsoft.Json](https://github.com/JamesNK/Newtonsoft.Json)
|
||||
* [MySQLConnector](https://mysqlconnector.net)
|
||||
* [IGDB-DOTNET](https://github.com/kamranayub/igdb-dotnet)
|
||||
* [EmulatorJS](https://github.com/EmulatorJS/EmulatorJS)
|
||||
|
||||
## Discord Server
|
||||
[](https://discord.gg/Nhu7wpT3k4)
|
||||
* ffmpeg
|
||||
* Internet Game Database API Key. See: https://api-docs.igdb.com/#account-creation - only required when not using the Hasheous proxy
|
||||
|
||||
# Installation
|
||||
See https://github.com/gaseous-project/gaseous-server/wiki/Installation for installation instructions.
|
||||
|
||||
# Adding Content
|
||||
1. Import signatures: see https://github.com/gaseous-project/gaseous-server/wiki/Signatures
|
||||
1. (Optional) Import signatures: see https://github.com/gaseous-project/gaseous-server/wiki/Signatures
|
||||
2. Add ROMs: see https://github.com/gaseous-project/gaseous-server/wiki/Adding-ROMs
|
||||
|
||||
# Friends of Gaseous
|
||||
* [EmulatorJS](https://github.com/EmulatorJS/EmulatorJS): A fantastic (and fast) Javascript based implementation of RetroArch, supporting a wide variety of platforms. Discord: https://discord.gg/6akryGkETU
|
||||
* [RomM](https://github.com/zurdi15/romm): Another self hosted ROM manager. Discord: https://discord.gg/P5HtHnhUDH
|
||||
|
||||
# Discord Server
|
||||
Join our Discord server: https://discord.gg/Nhu7wpT3k4
|
||||
|
||||
## Tests
|
||||
|
||||
Run unit tests locally:
|
||||
|
||||
```
|
||||
dotnet test gaseous-server.Tests/gaseous-server.Tests.csproj
|
||||
```
|
||||
|
||||
The test suite covers:
|
||||
- JSON and binary response handling in `HTTPComms.SendRequestAsync`
|
||||
- `Retry-After` parsing and retry behavior
|
||||
- Cancellation token behavior
|
||||
74
build/Dockerfile
Normal file
74
build/Dockerfile
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
FROM --platform=$BUILDPLATFORM mcr.microsoft.com/dotnet/sdk:10.0 AS build-env
|
||||
ARG TARGETARCH
|
||||
ARG BUILDPLATFORM
|
||||
WORKDIR /App
|
||||
EXPOSE 80
|
||||
|
||||
RUN echo "Target: $TARGETARCH"
|
||||
RUN echo "Build: $BUILDPLATFORM"
|
||||
|
||||
# Copy everything
|
||||
COPY .. ./
|
||||
|
||||
# Build Gaseous Web Server
|
||||
# Restore as distinct layers
|
||||
RUN dotnet restore "gaseous-server/gaseous-server.csproj" -a $TARGETARCH
|
||||
# Build and publish a release
|
||||
RUN dotnet publish "gaseous-server/gaseous-server.csproj" --use-current-runtime --self-contained true -c Release -o out -a $TARGETARCH
|
||||
|
||||
# update apt-get
|
||||
RUN apt-get update && apt upgrade -y
|
||||
|
||||
# download and unzip EmulatorJS from CDN
|
||||
# RUN apt-get install -y p7zip-full
|
||||
# RUN mkdir -p out/wwwroot/emulators/EmulatorJS
|
||||
# RUN wget https://cdn.emulatorjs.org/releases/4.2.3.7z
|
||||
# RUN 7z x -y -oout/wwwroot/emulators/EmulatorJS 4.2.3.7z
|
||||
RUN bash build/scripts/get-ejs-git.sh && mv gaseous-server/wwwroot/emulators/EmulatorJS out/wwwroot/emulators/EmulatorJS
|
||||
|
||||
# clean up apt-get
|
||||
RUN apt-get clean && rm -rf /var/lib/apt/lists
|
||||
|
||||
# Build runtime image
|
||||
FROM mcr.microsoft.com/dotnet/aspnet:10.0
|
||||
ENV INDOCKER=1
|
||||
WORKDIR /App
|
||||
COPY --from=build-env /App/out .
|
||||
|
||||
# variables
|
||||
ARG PUID=1000
|
||||
ARG PGID=1000
|
||||
ARG dbhost=localhost
|
||||
ARG dbuser=root
|
||||
ARG dbpass=gaseous
|
||||
|
||||
ENV PUID=${PUID}
|
||||
ENV PGID=${PGID}
|
||||
ENV dbhost=${dbhost}
|
||||
ENV dbuser=${dbuser}
|
||||
ENV dbpass=${dbpass}
|
||||
|
||||
# install supervisord
|
||||
RUN apt-get update && apt-get install -y supervisor
|
||||
COPY ../build/standard/supervisord.conf /etc/supervisor/conf.d/supervisord.conf
|
||||
RUN mkdir -p /var/run/supervisord
|
||||
RUN mkdir -p /var/log/supervisord
|
||||
|
||||
# Install support tools
|
||||
RUN apt-get install -y curl mariadb-client ffmpeg
|
||||
|
||||
# clean up apt-get
|
||||
RUN apt-get clean && rm -rf /var/lib/apt/lists
|
||||
|
||||
# copy entrypoint
|
||||
COPY ../build/standard/entrypoint.sh /usr/sbin/entrypoint.sh
|
||||
RUN chmod +x /usr/sbin/entrypoint.sh
|
||||
|
||||
# volumes
|
||||
VOLUME /home/gaseous/.gaseous-server
|
||||
|
||||
# Configure healthcheck
|
||||
HEALTHCHECK --interval=30s --timeout=5s --start-period=60s --retries=3 CMD curl --fail http://localhost:80/api/v1.1/HealthCheck || exit 1
|
||||
|
||||
# start services
|
||||
ENTRYPOINT [ "/usr/sbin/entrypoint.sh" ]
|
||||
80
build/Dockerfile-EmbeddedDB
Normal file
80
build/Dockerfile-EmbeddedDB
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
FROM --platform=$BUILDPLATFORM mcr.microsoft.com/dotnet/sdk:10.0 AS build-env
|
||||
ARG TARGETARCH
|
||||
ARG BUILDPLATFORM
|
||||
WORKDIR /App
|
||||
EXPOSE 80
|
||||
|
||||
RUN echo "Target: $TARGETARCH"
|
||||
RUN echo "Build: $BUILDPLATFORM"
|
||||
|
||||
# Copy everything
|
||||
COPY .. ./
|
||||
|
||||
# Build Gaseous Web Server
|
||||
# Restore as distinct layers
|
||||
RUN dotnet restore "gaseous-server/gaseous-server.csproj" -a $TARGETARCH
|
||||
# Build and publish a release
|
||||
RUN dotnet publish "gaseous-server/gaseous-server.csproj" --use-current-runtime --self-contained true -c Release -o out -a $TARGETARCH
|
||||
|
||||
# update apt-get
|
||||
RUN apt-get update && apt upgrade -y
|
||||
|
||||
# download and unzip EmulatorJS from CDN
|
||||
# RUN apt-get install -y p7zip-full
|
||||
# RUN mkdir -p out/wwwroot/emulators/EmulatorJS
|
||||
# RUN wget https://cdn.emulatorjs.org/releases/4.2.3.7z
|
||||
# RUN 7z x -y -oout/wwwroot/emulators/EmulatorJS 4.2.3.7z
|
||||
RUN bash build/scripts/get-ejs-git.sh && mv gaseous-server/wwwroot/emulators/EmulatorJS out/wwwroot/emulators/EmulatorJS
|
||||
|
||||
# Build runtime image
|
||||
FROM mcr.microsoft.com/dotnet/aspnet:10.0
|
||||
ENV INDOCKER=1
|
||||
WORKDIR /App
|
||||
COPY --from=build-env /App/out .
|
||||
|
||||
# variables
|
||||
ARG PUID=1000
|
||||
ARG PGID=1000
|
||||
ARG dbhost=localhost
|
||||
ARG dbuser=root
|
||||
ARG dbpass=gaseous
|
||||
ARG MARIADB_ROOT_PASSWORD=$dbpass
|
||||
|
||||
ENV PUID=${PUID}
|
||||
ENV PGID=${PGID}
|
||||
ENV dbhost=${dbhost}
|
||||
ENV dbuser=${dbuser}
|
||||
ENV dbpass=${dbpass}
|
||||
ENV MARIADB_ROOT_PASSWORD=${dbpass}
|
||||
|
||||
# install mariadb
|
||||
RUN DEBIAN_FRONTEND=noninteractive && \
|
||||
apt-get update && apt-get install -y mariadb-server mariadb-client
|
||||
RUN mkdir -p /run/mysqld
|
||||
COPY ../build/embeddeddb/mariadb.sh /usr/sbin/start-mariadb.sh
|
||||
RUN chmod +x /usr/sbin/start-mariadb.sh
|
||||
|
||||
# install supervisord
|
||||
RUN apt-get install -y supervisor
|
||||
COPY ../build/embeddeddb/supervisord.conf /etc/supervisor/conf.d/supervisord.conf
|
||||
RUN mkdir -p /var/run/supervisord
|
||||
RUN mkdir -p /var/log/supervisord
|
||||
|
||||
# Install support tools
|
||||
RUN apt-get install -y curl ffmpeg
|
||||
|
||||
# clean up apt-get
|
||||
RUN apt-get clean && rm -rf /var/lib/apt/lists
|
||||
|
||||
# copy entrypoint
|
||||
COPY ../build/embeddeddb/entrypoint.sh /usr/sbin/entrypoint.sh
|
||||
RUN chmod +x /usr/sbin/entrypoint.sh
|
||||
|
||||
# volumes
|
||||
VOLUME /home/gaseous/.gaseous-server /var/lib/mysql
|
||||
|
||||
# Configure healthcheck
|
||||
HEALTHCHECK --interval=30s --timeout=5s --start-period=60s --retries=3 CMD curl --fail http://localhost:80/api/v1.1/HealthCheck || exit 1
|
||||
|
||||
# start services
|
||||
ENTRYPOINT [ "/usr/sbin/entrypoint.sh" ]
|
||||
32
build/embeddeddb/entrypoint.sh
Normal file
32
build/embeddeddb/entrypoint.sh
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
#!/bin/sh
|
||||
|
||||
# create the user
|
||||
echo "Creating user gaseous with UID ${PUID} and GID ${PGID}"
|
||||
getent group ${PGID} > /dev/null 2>&1 || groupadd -g ${PGID} gaseous
|
||||
|
||||
# Check if user with PUID exists
|
||||
if id ${PUID} > /dev/null 2>&1; then
|
||||
# User exists, get its name and rename if necessary
|
||||
CURRENT_USER=$(id -un ${PUID})
|
||||
if [ "$CURRENT_USER" != "gaseous" ]; then
|
||||
usermod -l gaseous -d /home/gaseous "$CURRENT_USER"
|
||||
fi
|
||||
else
|
||||
# User doesn't exist, create it
|
||||
useradd -u ${PUID} -g ${PGID} -m gaseous -d /home/gaseous -G sudo
|
||||
fi
|
||||
usermod -p "*" gaseous
|
||||
mkdir -p /home/gaseous/.aspnet
|
||||
chown -R ${PUID} /App /home/gaseous/.aspnet
|
||||
chgrp -R ${PGID} /App /home/gaseous/.aspnet
|
||||
mkdir -p /home/gaseous/.gaseous-server
|
||||
chown -R ${PUID} /App /home/gaseous/.gaseous-server
|
||||
chgrp -R ${PGID} /App /home/gaseous/.gaseous-server
|
||||
|
||||
# Set MariaDB permissions
|
||||
mkdir -p /var/lib/mysql /var/log/mariadb /run/mysqld
|
||||
chown -R ${PUID} /var/lib/mysql /var/log/mariadb /run/mysqld
|
||||
chgrp -R ${PGID} /var/lib/mysql /var/log/mariadb /run/mysqld
|
||||
|
||||
# Start supervisord and services
|
||||
/usr/bin/supervisord -c /etc/supervisor/conf.d/supervisord.conf
|
||||
25
build/embeddeddb/mariadb.sh
Normal file
25
build/embeddeddb/mariadb.sh
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
#!/bin/sh
|
||||
|
||||
# install the database
|
||||
echo "Installing MariaDB"
|
||||
/usr/bin/mariadb-install-db --datadir=/var/lib/mysql --user=gaseous
|
||||
|
||||
# start the database server without network or grant tables
|
||||
echo "Starting MariaDB"
|
||||
/usr/sbin/mariadbd --datadir=/var/lib/mysql --skip-grant-tables --skip-networking &
|
||||
|
||||
# wait for the server to start
|
||||
sleep 5
|
||||
|
||||
# change the root password
|
||||
echo "Setting MariaDB root password"
|
||||
mariadb -u root -e "FLUSH PRIVILEGES; ALTER USER 'root'@'localhost' IDENTIFIED BY '$MARIADB_ROOT_PASSWORD'; ALTER USER 'gaseous'@'localhost' IDENTIFIED BY '$MARIADB_ROOT_PASSWORD'; FLUSH PRIVILEGES; SHUTDOWN;"
|
||||
|
||||
# stop the server
|
||||
sleep 5
|
||||
echo "Stopping MariaDB"
|
||||
killall mariadbd
|
||||
|
||||
# start the server normally
|
||||
echo "Starting MariaDB"
|
||||
/usr/sbin/mariadbd --datadir=/var/lib/mysql --user=gaseous
|
||||
37
build/embeddeddb/supervisord.conf
Normal file
37
build/embeddeddb/supervisord.conf
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
[supervisord]
|
||||
user=root
|
||||
nodaemon=true
|
||||
logfile=/var/log/supervisord/supervisord.log
|
||||
logfile_maxbytes=50
|
||||
logfile_backups=5
|
||||
pidfile=/var/run/supervisord/supervisord.pid
|
||||
loglevel = info
|
||||
|
||||
[unix_http_server]
|
||||
file=/var/run/supervisord/supervisor.sock
|
||||
chmod=0700
|
||||
|
||||
[rpcinterface:supervisor]
|
||||
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
|
||||
|
||||
[supervisorctl]
|
||||
serverurl=unix:///var/run/supervisord/supervisor.sock
|
||||
|
||||
[program:mariadb]
|
||||
user=gaseous
|
||||
command=bash -c "/usr/sbin/start-mariadb.sh"
|
||||
autostart=true
|
||||
autorestart=true
|
||||
redirect_stderr=true
|
||||
stdout_logfile=/dev/fd/1
|
||||
stdout_logfile_maxbytes=0
|
||||
|
||||
[program:gaseous-server]
|
||||
user=gaseous
|
||||
command=dotnet /App/gaseous-server.dll
|
||||
environment=HOME="/home/gaseous",USER="gaseous"
|
||||
autostart=true
|
||||
autorestart=true
|
||||
redirect_stderr=true
|
||||
stdout_logfile=/dev/fd/1
|
||||
stdout_logfile_maxbytes=0
|
||||
150
build/scripts/find-unused-wwwroot.sh
Normal file
150
build/scripts/find-unused-wwwroot.sh
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
#!/usr/bin/env bash
|
||||
# Detect potentially unused files under wwwroot.
|
||||
# Heuristics:
|
||||
# 1. Page HTML/JS considered referenced if their base name appears in LoadPageContent('name') or navigateToPage('name') or query parameter 'page=' usage.
|
||||
# 2. Global scripts considered referenced if listed in scriptLinks array inside index.html, imported via dynamic import(), or mentioned by name in other scripts.
|
||||
# 3. Stylesheets referenced if linked in <head> or imported via @import in CSS.
|
||||
# 4. Images referenced if their path appears in any file under wwwroot (src=, url(), etc.).
|
||||
# Exclusions:
|
||||
# - Honors .gitignore: files ignored by git will be skipped.
|
||||
# - Skips wwwroot/emulators/EmulatorJS (large third-party bundle).
|
||||
# Output: JSON summary + plain text sections.
|
||||
# NOTE: This is a heuristic; manual review required before deletion.
|
||||
set -euo pipefail
|
||||
|
||||
ROOT_DIR="$(cd "$(dirname "$0")/../../" && pwd)"
|
||||
WWWROOT="$ROOT_DIR/gaseous-server/wwwroot"
|
||||
EMULATOR_EXCLUDE_REL="emulators/EmulatorJS"
|
||||
GITIGNORE_FILE="$ROOT_DIR/.gitignore"
|
||||
|
||||
if [[ ! -d "$WWWROOT" ]]; then
|
||||
echo "wwwroot directory not found: $WWWROOT" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Build ignore list from .gitignore (basic lines only; skip patterns with wildcards complexity)
|
||||
IGNORE_PATTERNS=()
|
||||
if [[ -f "$GITIGNORE_FILE" ]]; then
|
||||
while IFS= read -r line; do
|
||||
[[ -z "$line" || "$line" == \#* ]] && continue
|
||||
IGNORE_PATTERNS+=("$line")
|
||||
done < "$GITIGNORE_FILE"
|
||||
fi
|
||||
|
||||
should_ignore() {
|
||||
local path="$1"
|
||||
for pat in "${IGNORE_PATTERNS[@]}"; do
|
||||
# Basic containment or fnmatch
|
||||
if [[ "$pat" == *"*"* || "$pat" == *"?"* || "$pat" == *"["* ]]; then
|
||||
if [[ $path == $pat ]]; then return 0; fi
|
||||
else
|
||||
if [[ "$path" == *"$pat"* ]]; then return 0; fi
|
||||
fi
|
||||
done
|
||||
return 1
|
||||
}
|
||||
|
||||
# Collect all candidate files excluding emulator bundle and .DS_Store
|
||||
mapfile -t ALL_FILES < <(find "$WWWROOT" -type f \( ! -path "*/$EMULATOR_EXCLUDE_REL/*" \) ! -name '.DS_Store' | sed "s#$WWWROOT/##")
|
||||
|
||||
# Filter out ignored files
|
||||
FILTERED_FILES=()
|
||||
for f in "${ALL_FILES[@]}"; do
|
||||
if should_ignore "$f"; then continue; fi
|
||||
FILTERED_FILES+=("$f")
|
||||
done
|
||||
|
||||
# Function to grep safely
|
||||
grep_ref() {
|
||||
local pattern="$1"
|
||||
grep -R --no-color -F "$pattern" "$WWWROOT" 2>/dev/null || true
|
||||
}
|
||||
|
||||
# Extract referenced page names via LoadPageContent('name') and navigateToPage('name')
|
||||
PAGE_NAMES=$(grep -R -E "LoadPageContent\('([a-zA-Z0-9_-]+)'" "$WWWROOT/index.html" "$WWWROOT/scripts" "$WWWROOT/pages" 2>/dev/null | sed -E "s/.*LoadPageContent\('([a-zA-Z0-9_-]+)'.*/\1/" | sort -u)
|
||||
NAV_PAGE_NAMES=$(grep -R -E "navigateToPage\('([a-zA-Z0-9_-]+)'" "$WWWROOT" 2>/dev/null | sed -E "s/.*navigateToPage\('([a-zA-Z0-9_-]+)'.*/\1/" | sort -u)
|
||||
QUERY_PAGE_NAMES=$(grep -R -E "page=([a-zA-Z0-9_-]+)" "$WWWROOT" 2>/dev/null | sed -E "s/.*page=([a-zA-Z0-9_-]+).*/\1/" | sort -u)
|
||||
REFERENCED_PAGES=$(printf "%s\n%s\n%s\n" "$PAGE_NAMES" "$NAV_PAGE_NAMES" "$QUERY_PAGE_NAMES" | sort -u | grep -v '^$' || true)
|
||||
|
||||
# All page base names from pages/*.html
|
||||
ALL_PAGE_BASES=$(find "$WWWROOT/pages" -maxdepth 1 -type f -name '*.html' -printf '%f\n' | sed 's/\.html$//' | sort -u)
|
||||
|
||||
# Determine unused pages
|
||||
UNUSED_PAGES=()
|
||||
for p in $ALL_PAGE_BASES; do
|
||||
if ! echo "$REFERENCED_PAGES" | grep -qx "$p"; then
|
||||
UNUSED_PAGES+=("$p")
|
||||
fi
|
||||
done
|
||||
|
||||
# Scripts referenced via scriptLinks array plus explicit <script src> tags
|
||||
# 1. scriptLinks array entries
|
||||
SCRIPT_LINKS=$(grep -A50 "scriptLinks" "$WWWROOT/index.html" | grep -E '"/scripts/[^" ]+\.js"' | sed -E 's/.*"\/scripts\/([^" ]+\.js)".*/\1/' | sort -u)
|
||||
# 2. language.js imported dynamically
|
||||
SCRIPT_LINKS+=$'\n'"language.js"
|
||||
# 3. <script src="/scripts/..."> tags in index.html and all pages/*.html
|
||||
HTML_SCRIPT_TAG_SOURCES=$(grep -R -E '<script[^>]+src="/scripts/[^" ]+\.js"' "$WWWROOT/index.html" "$WWWROOT/pages" 2>/dev/null | sed -E 's/.*src="\/scripts\/([^" ]+\.js)".*/\1/' | sort -u || true)
|
||||
SCRIPT_LINKS+=$'\n'"$HTML_SCRIPT_TAG_SOURCES"
|
||||
# Consolidate unique referenced scripts
|
||||
REFERENCED_SCRIPTS=$(echo "$SCRIPT_LINKS" | tr '\n' '\n' | grep -v '^$' | sort -u)
|
||||
|
||||
# All top-level scripts
|
||||
ALL_SCRIPTS=$(find "$WWWROOT/scripts" -maxdepth 1 -type f -name '*.js' -printf '%f\n' | sort -u)
|
||||
UNUSED_SCRIPTS=()
|
||||
for s in $ALL_SCRIPTS; do
|
||||
if ! echo "$REFERENCED_SCRIPTS" | grep -qx "$s"; then
|
||||
# secondary heuristic: if name appears anywhere else it might be dynamic
|
||||
if ! grep_ref "$s" | grep -q "$s"; then
|
||||
UNUSED_SCRIPTS+=("$s")
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
# Stylesheets referenced in index.html
|
||||
STYLE_LINKS=$(grep -A50 "styleSheets" "$WWWROOT/index.html" | grep -E '"/styles/[^" ]+\.css"' | sed -E 's/.*"\/styles\/([^" ]+\.css)".*/\1/' | sort -u)
|
||||
ALL_STYLES=$(find "$WWWROOT/styles" -maxdepth 1 -type f -name '*.css' -printf '%f\n' | sort -u)
|
||||
UNUSED_STYLES=()
|
||||
for st in $ALL_STYLES; do
|
||||
if ! echo "$STYLE_LINKS" | grep -qx "$st"; then
|
||||
if ! grep_ref "$st" | grep -q "$st"; then
|
||||
UNUSED_STYLES+=("$st")
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
# Images heuristic: referenced if literal path appears anywhere. Large; we keep simple.
|
||||
ALL_IMAGES=$(find "$WWWROOT/images" -type f -printf '%P\n' 2>/dev/null | sort -u || true)
|
||||
REF_IMAGES=$(grep -R --no-color -E '/images/' "$WWWROOT" 2>/dev/null | sed -E 's/.*\/images\/([^"'\'') ]+).*/\1/' | sort -u || true)
|
||||
UNUSED_IMAGES=()
|
||||
for im in $ALL_IMAGES; do
|
||||
if ! echo "$REF_IMAGES" | grep -qx "$im"; then
|
||||
UNUSED_IMAGES+=("$im")
|
||||
fi
|
||||
done
|
||||
|
||||
json_escape() { printf '%s' "$1" | sed 's/"/\\"/g'; }
|
||||
|
||||
# Output
|
||||
printf '\n==== UNUSED PAGES (heuristic) ====\n'
|
||||
for p in "${UNUSED_PAGES[@]}"; do echo "$p.html"; done
|
||||
printf '\n==== UNUSED SCRIPTS (heuristic) ====\n'
|
||||
for s in "${UNUSED_SCRIPTS[@]}"; do echo "$s"; done
|
||||
printf '\n==== UNUSED STYLES (heuristic) ====\n'
|
||||
for st in "${UNUSED_STYLES[@]}"; do echo "$st"; done
|
||||
printf '\n==== UNUSED IMAGES (heuristic) ====\n'
|
||||
for im in "${UNUSED_IMAGES[@]}"; do echo "$im"; done
|
||||
|
||||
# JSON summary for GitHub Actions consumption
|
||||
{
|
||||
printf '{"unusedPages":['
|
||||
first=1; for p in "${UNUSED_PAGES[@]}"; do [[ $first -eq 0 ]] && printf ','; first=0; printf '"%s"' "$(json_escape "$p.html")"; done
|
||||
printf '],"unusedScripts":['
|
||||
first=1; for s in "${UNUSED_SCRIPTS[@]}"; do [[ $first -eq 0 ]] && printf ','; first=0; printf '"%s"' "$(json_escape "$s")"; done
|
||||
printf '],"unusedStyles":['
|
||||
first=1; for st in "${UNUSED_STYLES[@]}"; do [[ $first -eq 0 ]] && printf ','; first=0; printf '"%s"' "$(json_escape "$st")"; done
|
||||
printf '],"unusedImages":['
|
||||
first=1; for im in "${UNUSED_IMAGES[@]}"; do [[ $first -eq 0 ]] && printf ','; first=0; printf '"%s"' "$(json_escape "$im")"; done
|
||||
printf ']}'
|
||||
}
|
||||
|
||||
exit 0
|
||||
51
build/scripts/get-ejs-git.sh
Normal file
51
build/scripts/get-ejs-git.sh
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
#!/bin/bash

# This script clones the EmulatorJS repository into the specified directory.
# It is intended to be used in the build process, and run from the project root.
REPO_URL="https://github.com/EmulatorJS/EmulatorJS.git"
REPO_DIR="./gaseous-server/wwwroot/emulators/EmulatorJS"

if [ ! -d "$REPO_DIR/.git" ]; then
    echo "Cloning EmulatorJS repository..."
    rm -rf "$REPO_DIR"
    # Abort on clone failure: continuing would mirror cores into a broken tree.
    git clone "$REPO_URL" "$REPO_DIR" || { echo "Failed to clone $REPO_URL"; exit 1; }
else
    echo "Repository exists. Resetting to origin and pulling latest..."
    pushd "$REPO_DIR" >/dev/null || { echo "Failed to enter repo dir"; exit 1; }
    # Abort on fetch failure: the reset below would otherwise silently reuse
    # stale refs.
    git fetch origin || { echo "Failed to fetch origin"; popd >/dev/null; exit 1; }
    # Prefer main; fall back to master if main not present.
    if git show-ref --verify --quiet refs/remotes/origin/main; then
        TARGET_BRANCH="main"
    else
        TARGET_BRANCH="master"
    fi
    # Ensure local branch exists and is tracking.
    if git rev-parse --verify "$TARGET_BRANCH" >/dev/null 2>&1; then
        git checkout "$TARGET_BRANCH"
    else
        git checkout -b "$TARGET_BRANCH" "origin/$TARGET_BRANCH"
    fi
    git reset --hard "origin/$TARGET_BRANCH"
    git clean -fd
    popd >/dev/null
fi

# Recursively mirror all core files from the CDN into the local cores directory.
# This will overwrite existing files but will not delete extra local files.
# If you want a clean sync, delete the destination directory first.
CORES_URL="https://cdn.emulatorjs.org/nightly/data/cores/"
DEST_DIR="./gaseous-server/wwwroot/emulators/EmulatorJS/data/cores"

mkdir -p "$DEST_DIR"

# Use wget recursive download:
#   -r               : recursive
#   -np              : no parent (stay within cores/)
#   -nH              : don't create host directory
#   --cut-dirs=3     : strip 'nightly/data/cores' from path so deeper structure starts at cores root
#   -R "index.html*" : skip auto-generated index listings
#   -P DEST_DIR      : set destination prefix
# Existing files are overwritten by default.
wget -r -np -nH --cut-dirs=3 -R "index.html*" -P "$DEST_DIR" "$CORES_URL"

echo "EmulatorJS cores download complete into $DEST_DIR"
|
||||
38
build/scripts/kill-gaseous-server.sh
Executable file
38
build/scripts/kill-gaseous-server.sh
Executable file
|
|
@ -0,0 +1,38 @@
|
|||
#!/usr/bin/env bash
set -euo pipefail

# Kill only processes whose command line contains the exact token 'gaseous-server'.
# 1) Find matching PIDs from ps output (no other patterns searched)
# 2) Try SIGTERM with brief waits
# 3) SIGKILL if still present

# List candidate PIDs by appearance of 'gaseous-server' in the command line.
# The grep helpers, this script's own invocation (its path contains the token
# 'gaseous-server'!), and our own PID are all excluded — without these
# exclusions the script would match and SIGTERM itself mid-run.
find_pids() {
    ps -eo pid=,args= \
        | grep -F "gaseous-server" \
        | grep -v "grep" \
        | grep -Fv "$0" \
        | awk -v self="$$" '$1 != self {print $1}' \
        | sort -u
}

mapfile -t pids < <(find_pids)

if [[ ${#pids[@]} -eq 0 ]]; then
    echo "No gaseous-server processes found."
    exit 0
fi

echo "Found gaseous-server PIDs: ${pids[*]}"
kill -TERM "${pids[@]}" || true

# Wait up to 5 seconds for graceful shutdown
for i in {1..5}; do
    sleep 1
    mapfile -t still < <(find_pids)
    if [[ ${#still[@]} -eq 0 ]]; then
        echo "gaseous-server terminated gracefully."
        exit 0
    fi
    echo "Waiting for shutdown... ($i)"
done

# Force kill any remaining
mapfile -t still < <(find_pids)
if [[ ${#still[@]} -gt 0 ]]; then
    echo "Force killing remaining gaseous-server PIDs: ${still[*]}"
    kill -KILL "${still[@]}" || true
fi

echo "Done."
|
||||
27
build/standard/entrypoint.sh
Normal file
27
build/standard/entrypoint.sh
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
#!/bin/sh

# Container entrypoint: create/rename the runtime user and group from the
# PUID/PGID environment variables, fix ownership of the app and data
# directories, then hand control to supervisord.
# NOTE(review): PUID and PGID are assumed to always be set by the container
# environment — confirm against the Dockerfile/compose defaults.

# create the user
echo "Creating user gaseous with UID ${PUID} and GID ${PGID}"
getent group "${PGID}" > /dev/null 2>&1 || groupadd -g "${PGID}" gaseous

# Check if user with PUID exists
if id "${PUID}" > /dev/null 2>&1; then
    # User exists, get its name and rename if necessary
    CURRENT_USER=$(id -un "${PUID}")
    if [ "$CURRENT_USER" != "gaseous" ]; then
        usermod -l gaseous -d /home/gaseous "$CURRENT_USER"
    fi
else
    # User doesn't exist, create it
    useradd -u "${PUID}" -g "${PGID}" -m gaseous -d /home/gaseous -G sudo
fi
# '*' locks the account password (no password login possible)
usermod -p "*" gaseous
mkdir -p /home/gaseous/.aspnet
chown -R "${PUID}" /App /home/gaseous/.aspnet
chgrp -R "${PGID}" /App /home/gaseous/.aspnet
mkdir -p /home/gaseous/.gaseous-server
chown -R "${PUID}" /App /home/gaseous/.gaseous-server
chgrp -R "${PGID}" /App /home/gaseous/.gaseous-server

# Start supervisord and services.
# exec replaces this shell so supervisord becomes PID 1 and receives
# container stop signals (SIGTERM) directly instead of them landing on
# a shell that does not forward them.
exec /usr/bin/supervisord -c /etc/supervisor/conf.d/supervisord.conf
|
||||
28
build/standard/supervisord.conf
Normal file
28
build/standard/supervisord.conf
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
[supervisord]
user=root
nodaemon=true
logfile=/var/log/supervisord/supervisord.log
; Rotate at 50MB. A bare "50" is interpreted as 50 *bytes* by supervisord
; and would rotate the log on almost every write; supervisord's own default
; is 50MB.
logfile_maxbytes=50MB
logfile_backups=5
pidfile=/var/run/supervisord/supervisord.pid
loglevel = info

[unix_http_server]
file=/var/run/supervisord/supervisor.sock
chmod=0700

[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface

[supervisorctl]
serverurl=unix:///var/run/supervisord/supervisor.sock

[program:gaseous-server]
user=gaseous
command=dotnet /App/gaseous-server.dll
environment=HOME="/home/gaseous",USER="gaseous"
autostart=true
autorestart=true
redirect_stderr=true
; Stream program output straight to the container's stdout, unrotated.
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
|
||||
|
|
@ -4,6 +4,7 @@ services:
|
|||
container_name: gaseous-server
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./build/Dockerfile
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- gaseous
|
||||
|
|
@ -12,7 +13,7 @@ services:
|
|||
ports:
|
||||
- 5198:80
|
||||
volumes:
|
||||
- gs:/root/.gaseous-server
|
||||
- gs:/home/gaseous/.gaseous-server
|
||||
environment:
|
||||
- TZ=Australia/Sydney
|
||||
- dbhost=gsdb
|
||||
|
|
|
|||
|
|
@ -1,39 +0,0 @@
|
|||
version: '2'
|
||||
services:
|
||||
gaseous-server:
|
||||
container_name: gaseous-server
|
||||
image: gaseousgames/gaseousserver:latest
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- gaseous
|
||||
depends_on:
|
||||
- gsdb
|
||||
ports:
|
||||
- 5198:80
|
||||
volumes:
|
||||
- gs:/root/.gaseous-server
|
||||
environment:
|
||||
- TZ=Australia/Sydney
|
||||
- dbhost=gsdb
|
||||
- dbuser=root
|
||||
- dbpass=gaseous
|
||||
- igdbclientid=<clientid>
|
||||
- igdbclientsecret=<clientsecret>
|
||||
gsdb:
|
||||
container_name: gsdb
|
||||
image: mariadb
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- gaseous
|
||||
volumes:
|
||||
- gsdb:/var/lib/mysql
|
||||
environment:
|
||||
- MARIADB_ROOT_PASSWORD=gaseous
|
||||
- MARIADB_USER=gaseous
|
||||
- MARIADB_PASSWORD=gaseous
|
||||
networks:
|
||||
gaseous:
|
||||
driver: bridge
|
||||
volumes:
|
||||
gs:
|
||||
gsdb:
|
||||
1
docs
Submodule
1
docs
Submodule
|
|
@ -0,0 +1 @@
|
|||
Subproject commit 1d5f0530f9ae2b868fd89f5fed129b7afcf9ae1c
|
||||
613
gaseous-cli/Program.cs
Normal file
613
gaseous-cli/Program.cs
Normal file
|
|
@ -0,0 +1,613 @@
|
|||
using System;
|
||||
using System.Data;
|
||||
using Authentication;
|
||||
using gaseous_server.Classes;
|
||||
using Microsoft.AspNetCore.Identity;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
/* ------------------------------------------------- */
|
||||
/* This tool is a CLI tool that is used to manage */
|
||||
/* the Gaseous Server. */
|
||||
/* Functions such as user management, and backups */
|
||||
/* are available. */
|
||||
/* ------------------------------------------------- */
|
||||
|
||||
// load app settings
|
||||
// load app settings
Config.InitSettings();

// set up database connection
Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);

// set up identity services so the CLI reuses the server's user/role stores
IServiceCollection services = new ServiceCollection();
services.AddLogging();

services.AddIdentity<ApplicationUser, ApplicationRole>(options =>
{
    // NOTE(review): this password policy appears intended to mirror the
    // server's configuration — confirm the two stay in sync.
    options.Password.RequireDigit = true;
    options.Password.RequireLowercase = true;
    options.Password.RequireNonAlphanumeric = false;
    options.Password.RequireUppercase = true;
    options.Password.RequiredLength = 10;
    // null removes the allowed-character restriction on usernames
    options.User.AllowedUserNameCharacters = null;
    options.User.RequireUniqueEmail = true;
    options.SignIn.RequireConfirmedPhoneNumber = false;
    options.SignIn.RequireConfirmedEmail = false;
    options.SignIn.RequireConfirmedAccount = false;
})
    .AddUserStore<UserStore>()
    .AddRoleStore<RoleStore>()
    .AddDefaultTokenProviders()
    ;
services.AddScoped<UserStore>();
services.AddScoped<RoleStore>();

services.AddTransient<IUserStore<ApplicationUser>, UserStore>();
services.AddTransient<IRoleStore<ApplicationRole>, RoleStore>();
// GetRequiredService fails fast with a descriptive exception if the identity
// registration is broken, instead of returning null and causing a
// NullReferenceException at the first userManager call later on.
var userManager = services.BuildServiceProvider().GetRequiredService<UserManager<ApplicationUser>>();

// load the command line arguments
string[] cmdArgs = Environment.GetCommandLineArgs();
|
||||
|
||||
// check if the user has entered any arguments
|
||||
// No arguments, or an explicit 'help' command: print usage.
// (The original duplicated this 9-line help text in two separate branches;
// the conditions are merged so the text exists only once.)
if (cmdArgs.Length == 1 || cmdArgs[1] == "help")
{
    Console.WriteLine("Gaseous CLI - A tool for managing the Gaseous Server");
    Console.WriteLine("Usage: gaseous-cli [command] [options]");
    Console.WriteLine("Commands:");
    Console.WriteLine(" user [command] [options] - Manage users");
    Console.WriteLine(" 2fa [subcommand] - 2FA operations: resetkey|getkey|enable|genrc|countrc|redeem");
    Console.WriteLine(" role [command] [options] - Manage roles");
    Console.WriteLine(" db [command] [options] - Database operations: migrate|validate|backup|restore|status");
    Console.WriteLine(" help - Display this help message");
    return;
}
|
||||
|
||||
// check if the user has entered the user command
|
||||
// check if the user has entered the user command
if (cmdArgs[1] == "user")
{
    // no subcommand supplied - print user management help
    if (cmdArgs.Length == 2)
    {
        Console.WriteLine("User Management");
        Console.WriteLine("Usage: gaseous-cli user [command] [options]");
        Console.WriteLine("Commands:");
        Console.WriteLine(" add [username] [password] - Add a new user");
        Console.WriteLine(" delete [username] - Delete a user");
        Console.WriteLine(" resetpassword [username] [password] - Reset a user's password");
        Console.WriteLine(" list - List all users");
        Console.WriteLine(" 2fa [subcommand] - Manage 2FA (resetkey|getkey|enable|genrc|countrc|redeem)");
        return;
    }

    // --- user add [username] [password] ---
    if (cmdArgs[2] == "add")
    {
        if (cmdArgs.Length < 5)
        {
            Console.WriteLine("Error: Please enter a username and password");
            return;
        }

        UserTable<ApplicationUser> userTable = new UserTable<ApplicationUser>(db);
        if (userTable.GetUserByEmail(cmdArgs[3]) != null)
        {
            Console.WriteLine("Error: User already exists");
            return;
        }

        // create the user object (the username doubles as the email address)
        ApplicationUser user = new ApplicationUser
        {
            Id = Guid.NewGuid().ToString(),
            Email = cmdArgs[3],
            NormalizedEmail = cmdArgs[3].ToUpper(),
            EmailConfirmed = true,
            UserName = cmdArgs[3],
            NormalizedUserName = cmdArgs[3].ToUpper()
        };

        // hash the password directly; CreateAsync is invoked without a password
        PasswordHasher<ApplicationUser> passwordHasher = new PasswordHasher<ApplicationUser>();
        user.PasswordHash = passwordHasher.HashPassword(user, cmdArgs[4]);

        // surface identity errors instead of reporting success unconditionally
        var createResult = await userManager.CreateAsync(user);
        if (!createResult.Succeeded)
        {
            Console.Error.WriteLine("Error: Unable to create user: " + string.Join("; ", createResult.Errors.Select(e => e.Description)));
            return;
        }
        var defaultRoleResult = await userManager.AddToRoleAsync(user, "Player");
        if (!defaultRoleResult.Succeeded)
        {
            Console.Error.WriteLine("Error: User created but default role could not be assigned: " + string.Join("; ", defaultRoleResult.Errors.Select(e => e.Description)));
            return;
        }

        Console.WriteLine("User created successfully with default role: Player");

        return;
    }

    // --- user 2fa [subcommand] ---
    if (cmdArgs[2] == "2fa")
    {
        // no subcommand - print 2FA help
        if (cmdArgs.Length < 4)
        {
            Console.WriteLine("2FA Management");
            Console.WriteLine("Usage: gaseous-cli user 2fa [subcommand] [args]");
            Console.WriteLine("Subcommands:");
            Console.WriteLine(" enable [username] [true|false] - Enable/disable 2FA flag");
            Console.WriteLine(" resetkey [username] - Reset authenticator key and print it");
            Console.WriteLine(" getkey [username] - Display current authenticator key");
            Console.WriteLine(" genrc [username] [count] - Generate new recovery codes and print them");
            Console.WriteLine(" countrc [username] - Count remaining recovery codes");
            Console.WriteLine(" redeem [username] [code] - Redeem a recovery code");
            return;
        }

        var sub = cmdArgs[3].ToLowerInvariant();
        UserTable<ApplicationUser> userTable = new UserTable<ApplicationUser>(db);
        // missing username argument falls through to the not-found error below
        ApplicationUser user = userTable.GetUserByEmail(cmdArgs.Length > 4 ? cmdArgs[4] : "");
        if (user == null)
        {
            Console.WriteLine("Error: User not found");
            return;
        }

        if (sub == "enable")
        {
            // bool.TryParse avoids an unhandled FormatException on bad input
            // (the original used bool.Parse and crashed on anything else)
            if (cmdArgs.Length < 6 || !bool.TryParse(cmdArgs[5], out bool enabled))
            {
                Console.WriteLine("Error: Please provide true or false");
                return;
            }
            await userManager.SetTwoFactorEnabledAsync(user, enabled);
            Console.WriteLine($"TwoFactorEnabled set to {enabled}");
            return;
        }
        else if (sub == "resetkey")
        {
            await userManager.ResetAuthenticatorKeyAsync(user);
            var key = await userManager.GetAuthenticatorKeyAsync(user);
            Console.WriteLine($"New Authenticator Key: {key}");
            return;
        }
        else if (sub == "getkey")
        {
            var key = await userManager.GetAuthenticatorKeyAsync(user);
            Console.WriteLine(key == null ? "(no key)" : key);
            return;
        }
        else if (sub == "genrc")
        {
            // default of 5 codes; a failed TryParse into the default variable
            // would zero it, so only accept a successfully parsed positive value
            int count = 5;
            if (cmdArgs.Length >= 6 && int.TryParse(cmdArgs[5], out int requestedCount) && requestedCount > 0)
            {
                count = requestedCount;
            }
            var codes = await userManager.GenerateNewTwoFactorRecoveryCodesAsync(user, count);
            Console.WriteLine("Generated recovery codes:");
            foreach (var c in codes)
            {
                Console.WriteLine(c);
            }
            var remaining = await userManager.CountRecoveryCodesAsync(user);
            Console.WriteLine($"Remaining code count: {remaining}");
            return;
        }
        else if (sub == "countrc")
        {
            var remaining = await userManager.CountRecoveryCodesAsync(user);
            Console.WriteLine(remaining);
            return;
        }
        else if (sub == "redeem")
        {
            if (cmdArgs.Length < 6)
            {
                Console.WriteLine("Error: Please provide a recovery code to redeem");
                return;
            }
            var code = cmdArgs[5];
            var result = await userManager.RedeemTwoFactorRecoveryCodeAsync(user, code);
            Console.WriteLine(result.Succeeded ? "Redeemed" : "Failed to redeem");
            var remaining = await userManager.CountRecoveryCodesAsync(user);
            Console.WriteLine($"Remaining code count: {remaining}");
            return;
        }
        else
        {
            Console.WriteLine("Error: Unknown 2fa subcommand");
            return;
        }
    }

    // --- user delete [username] ---
    if (cmdArgs[2] == "delete")
    {
        if (cmdArgs.Length < 4)
        {
            Console.WriteLine("Error: Please enter a username");
            return;
        }

        UserTable<ApplicationUser> userTable = new UserTable<ApplicationUser>(db);
        ApplicationUser user = userTable.GetUserByEmail(cmdArgs[3]);
        if (user == null)
        {
            Console.WriteLine("Error: User not found");
            return;
        }

        // surface identity errors instead of reporting success unconditionally
        var deleteResult = await userManager.DeleteAsync(user);
        if (!deleteResult.Succeeded)
        {
            Console.Error.WriteLine("Error: Unable to delete user: " + string.Join("; ", deleteResult.Errors.Select(e => e.Description)));
            return;
        }

        Console.WriteLine("User deleted successfully");

        return;
    }

    // --- user resetpassword [username] [password] ---
    if (cmdArgs[2] == "resetpassword")
    {
        if (cmdArgs.Length < 5)
        {
            Console.WriteLine("Error: Please enter a username and password");
            return;
        }

        UserTable<ApplicationUser> userTable = new UserTable<ApplicationUser>(db);
        ApplicationUser user = userTable.GetUserByEmail(cmdArgs[3]);
        if (user == null)
        {
            Console.WriteLine("Error: User not found");
            return;
        }

        // hash and store the new password directly on the user record
        PasswordHasher<ApplicationUser> passwordHasher = new PasswordHasher<ApplicationUser>();
        user.PasswordHash = passwordHasher.HashPassword(user, cmdArgs[4]);

        // surface identity errors instead of reporting success unconditionally
        var updateResult = await userManager.UpdateAsync(user);
        if (!updateResult.Succeeded)
        {
            Console.Error.WriteLine("Error: Unable to reset password: " + string.Join("; ", updateResult.Errors.Select(e => e.Description)));
            return;
        }

        Console.WriteLine("Password reset successfully");

        return;
    }

    // --- user list ---
    if (cmdArgs[2] == "list")
    {
        // list all users with their roles
        UserTable<ApplicationUser> userTable = new UserTable<ApplicationUser>(db);
        var userList = userTable.GetUsers();
        foreach (var user in userList)
        {
            var roles = await userManager.GetRolesAsync(user);
            Console.WriteLine(user.Email + " - " + string.Join(", ", roles));
        }
        return;
    }
}
|
||||
|
||||
// check if the user has entered the role command
|
||||
// check if the user has entered the role command
if (cmdArgs[1] == "role")
{
    // no subcommand supplied - print role management help
    if (cmdArgs.Length == 2)
    {
        Console.WriteLine("Role Management");
        Console.WriteLine("Usage: gaseous-cli role [command] [options]");
        Console.WriteLine("Commands:");
        Console.WriteLine(" set [username] [role] - Set the role of a user");
        Console.WriteLine(" list - List all roles");
        return;
    }

    // --- role set [username] [role] ---
    if (cmdArgs[2] == "set")
    {
        if (cmdArgs.Length < 5)
        {
            Console.WriteLine("Error: Please enter a username and role");
            return;
        }

        UserTable<ApplicationUser> userTable = new UserTable<ApplicationUser>(db);
        ApplicationUser user = userTable.GetUserByEmail(cmdArgs[3]);
        if (user == null)
        {
            Console.WriteLine("Error: User not found");
            return;
        }

        // remove all existing roles so only the new role remains; skip the
        // removal call entirely when the user has no roles
        var roles = await userManager.GetRolesAsync(user);
        if (roles.Count > 0)
        {
            var removeResult = await userManager.RemoveFromRolesAsync(user, roles.ToArray());
            if (!removeResult.Succeeded)
            {
                Console.Error.WriteLine("Error: Unable to remove existing roles: " + string.Join("; ", removeResult.Errors.Select(e => e.Description)));
                return;
            }
        }

        // add the new role to the user, surfacing failures (e.g. unknown role)
        // instead of reporting success unconditionally
        var addResult = await userManager.AddToRoleAsync(user, cmdArgs[4]);
        if (!addResult.Succeeded)
        {
            Console.Error.WriteLine("Error: Unable to set role: " + string.Join("; ", addResult.Errors.Select(e => e.Description)));
            return;
        }

        Console.WriteLine("Role set successfully");

        return;
    }

    // --- role list ---
    if (cmdArgs[2] == "list")
    {
        // the set of roles is fixed; list them without a database round trip
        string[] roles = { "Player", "Gamer", "Admin" };
        foreach (var role in roles)
        {
            Console.WriteLine(role);
        }
        return;
    }
}
|
||||
|
||||
// // check if the user has entered the backup command
|
||||
// if (cmdArgs[1] == "backup")
|
||||
// {
|
||||
// // check if the user has entered any arguments
|
||||
// if (cmdArgs.Length == 2)
|
||||
// {
|
||||
// // no arguments were entered
|
||||
// Console.WriteLine("Backup Management");
|
||||
// Console.WriteLine("Usage: gaseous-cli backup [command] [options]");
|
||||
// Console.WriteLine("Commands:");
|
||||
// Console.WriteLine(" create - Create a backup");
|
||||
// Console.WriteLine(" list - List all backups");
|
||||
// Console.WriteLine(" remove [backup_id] - Remove a backup");
|
||||
// return;
|
||||
// }
|
||||
|
||||
// // check if the user has entered the create command
|
||||
// if (cmdArgs[2] == "create")
|
||||
// {
|
||||
// // create a backup
|
||||
// Backup.CreateBackup();
|
||||
// return;
|
||||
// }
|
||||
|
||||
// // check if the user has entered the list command
|
||||
// if (cmdArgs[2] == "list")
|
||||
// {
|
||||
// // list all backups
|
||||
// Backup.ListBackups();
|
||||
// return;
|
||||
// }
|
||||
|
||||
// // check if the user has entered the remove command
|
||||
// if (cmdArgs[2] == "remove")
|
||||
// {
|
||||
// // check if the user has entered the backup id
|
||||
// if (cmdArgs.Length < 4)
|
||||
// {
|
||||
// // the backup id was not entered
|
||||
// Console.WriteLine("Error: Please enter a backup id");
|
||||
// return;
|
||||
// }
|
||||
|
||||
// // remove the backup
|
||||
// Backup.RemoveBackup(cmdArgs[3]);
|
||||
// return;
|
||||
// }
|
||||
// }
|
||||
|
||||
// // check if the user has entered the restore command
|
||||
// if (cmdArgs[1] == "restore")
|
||||
// {
|
||||
// // check if the user has entered any arguments
|
||||
// if (cmdArgs.Length == 2)
|
||||
// {
|
||||
// // no arguments were entered
|
||||
// Console.WriteLine("Restore Management");
|
||||
// Console.WriteLine("Usage: gaseous-cli restore [command] [options]");
|
||||
// Console.WriteLine("Commands:");
|
||||
// Console.WriteLine(" restore [backup_id] - Restore a backup");
|
||||
// return;
|
||||
// }
|
||||
|
||||
// // check if the user has entered the restore command
|
||||
// if (cmdArgs[2] == "restore")
|
||||
// {
|
||||
// // check if the user has entered the backup id
|
||||
// if (cmdArgs.Length < 4)
|
||||
// {
|
||||
// // the backup id was not entered
|
||||
// Console.WriteLine("Error: Please enter a backup id");
|
||||
// return;
|
||||
// }
|
||||
|
||||
// // restore the backup
|
||||
// Restore.RestoreBackup(cmdArgs[3]);
|
||||
// return;
|
||||
// }
|
||||
// }
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// db command: migrate, validate, backup, restore, status
|
||||
// -----------------------------------------------------------------------
|
||||
if (cmdArgs[1] == "db")
{
    // no subcommand supplied - print database management help
    if (cmdArgs.Length < 3)
    {
        Console.WriteLine("Database Management");
        Console.WriteLine("Usage: gaseous-cli db [command] [options]");
        Console.WriteLine("Commands:");
        Console.WriteLine(" migrate - Apply pending database migrations");
        Console.WriteLine(" validate - Validate database structure against migration manifest");
        Console.WriteLine(" backup - Take a backup of the database");
        Console.WriteLine(" restore [file] - Restore the database from a backup file");
        Console.WriteLine(" status - Show recent migration journal entries");
        return;
    }

    // --- db migrate ---
    if (cmdArgs[2] == "migrate")
    {
        Console.WriteLine("Starting database migration...");
        try
        {
            // keep migration logging off the console while InitDB runs
            Logging.WriteToDiskOnly = true;
            Database migrateDb = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
            await migrateDb.InitDB();
            Logging.WriteToDiskOnly = false;
            Console.WriteLine("Migration complete.");
        }
        catch (Exception ex)
        {
            // restore console logging before reporting and exiting non-zero
            Logging.WriteToDiskOnly = false;
            Console.Error.WriteLine($"Migration failed: {ex.Message}");
            Environment.Exit(1);
        }
        return;
    }

    // --- db validate ---
    if (cmdArgs[2] == "validate")
    {
        Console.WriteLine("Validating database structure...");

        Database validateDb = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
        int currentVersion = validateDb.GetDatabaseSchemaVersion();

        // validate every schema version from 0 up to the current version
        bool passed = DatabaseMigrationValidator.ValidateRange(0, currentVersion);
        if (passed)
        {
            Console.WriteLine($"All validation checks passed for schema version {currentVersion}.");
        }
        else
        {
            // exit code 2 distinguishes validation failure from operational errors
            Console.Error.WriteLine($"One or more critical validation checks FAILED for schema version {currentVersion}. Review logs for details.");
            Environment.Exit(2);
        }
        return;
    }

    // --- db backup ---
    if (cmdArgs[2] == "backup")
    {
        try
        {
            string path = DatabaseBackup.GenerateBackupPath();
            DatabaseBackup.Backup(path);
            Console.WriteLine($"Backup created: {path}");
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"Backup failed: {ex.Message}");
            Environment.Exit(1);
        }
        return;
    }

    // --- db restore [file] ---
    if (cmdArgs[2] == "restore")
    {
        if (cmdArgs.Length < 4)
        {
            Console.Error.WriteLine("Error: Please provide a backup file path");
            Console.Error.WriteLine("Usage: gaseous-cli db restore <path-to-backup.sql>");
            Environment.Exit(1);
            return;
        }

        string restorePath = cmdArgs[3];
        Console.WriteLine($"Restoring database from: {restorePath}");
        // deliberate grace period so a destructive restore can be aborted
        Console.WriteLine("WARNING: This will overwrite all current data. Press Ctrl+C within 5 seconds to abort.");
        await Task.Delay(5000);

        try
        {
            DatabaseBackup.Restore(restorePath);
            Console.WriteLine("Restore complete.");
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"Restore failed: {ex.Message}");
            Environment.Exit(1);
        }
        return;
    }

    // --- db status ---
    if (cmdArgs[2] == "status")
    {
        // default to the 20 most recent journal entries; a failed TryParse
        // into 'limit' would zero it (showing nothing), so only accept a
        // successfully parsed positive value
        int limit = 20;
        if (cmdArgs.Length >= 4 && int.TryParse(cmdArgs[3], out int requestedLimit) && requestedLimit > 0)
        {
            limit = requestedLimit;
        }

        try
        {
            MigrationJournal.EnsureTable();
            DataTable journal = MigrationJournal.GetRecentEntries(limit);

            Console.WriteLine($"{"Version",-10} {"Type",-15} {"Step",-40} {"Status",-12} {"Started",-22} {"Completed",-22}");
            Console.WriteLine(new string('-', 125));

            foreach (DataRow row in journal.Rows)
            {
                // a NULL CompletedAt means the step has not finished (or failed)
                string completed = row["CompletedAt"] == DBNull.Value
                    ? "-"
                    : Convert.ToDateTime(row["CompletedAt"]).ToString("yyyy-MM-dd HH:mm:ss");
                Console.WriteLine(
                    $"{row["SchemaVersion"],-10} " +
                    $"{row["StepType"],-15} " +
                    $"{TruncateTo(row["StepName"].ToString(), 40),-40} " +
                    $"{row["Status"],-12} " +
                    $"{Convert.ToDateTime(row["StartedAt"]).ToString("yyyy-MM-dd HH:mm:ss"),-22} " +
                    $"{completed,-22}");

                if (row["Status"].ToString() == "Failed" && row["ErrorMessage"] != DBNull.Value)
                {
                    Console.ForegroundColor = ConsoleColor.Red;
                    Console.WriteLine($"  ERROR: {row["ErrorMessage"]}");
                    Console.ResetColor();
                }
            }
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"Unable to read migration journal: {ex.Message}");
            Environment.Exit(1);
        }
        return;
    }

    Console.Error.WriteLine($"Error: Unknown db subcommand '{cmdArgs[2]}'");
    Environment.Exit(1);
    return;
}

// the user entered an invalid command
Console.WriteLine("Error: Invalid command");
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Local helper functions
|
||||
// -----------------------------------------------------------------------
|
||||
/// <summary>
/// Truncates a string to at most <paramref name="maxLength"/> characters,
/// replacing the trimmed tail with "..." when truncation occurs.
/// </summary>
/// <param name="value">The string to truncate; null or empty yields "".</param>
/// <param name="maxLength">Maximum length of the returned string.</param>
/// <returns>The original string, or a truncated copy ending in "...".</returns>
static string TruncateTo(string? value, int maxLength)
{
    if (string.IsNullOrEmpty(value)) return "";
    if (value.Length <= maxLength) return value;
    // The "..." suffix does not fit when maxLength < 3; fall back to a hard
    // cut there (the original Substring(0, maxLength - 3) threw
    // ArgumentOutOfRangeException for those values).
    if (maxLength <= 3) return maxLength <= 0 ? "" : value.Substring(0, maxLength);
    return value.Substring(0, maxLength - 3) + "...";
}
|
||||
29
gaseous-cli/gaseous-cli.csproj
Normal file
29
gaseous-cli/gaseous-cli.csproj
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<OutputType>Exe</OutputType>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<RootNamespace>gaseous_cli</RootNamespace>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<RuntimeIdentifiers>win-x64;linux-x64</RuntimeIdentifiers>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
|
||||
<WarningLevel>4</WarningLevel>
|
||||
<DocumentationFile>bin\Debug\net10.0\gaseous-cli.xml</DocumentationFile>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
|
||||
<WarningLevel>4</WarningLevel>
|
||||
<DocumentationFile>bin\Release\net10.0\gaseous-cli.xml</DocumentationFile>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Newtonsoft.Json" Version="13.0.4" />
|
||||
<PackageReference Include="MySqlConnector" Version="2.5.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../gaseous-lib/gaseous-lib.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
357
gaseous-configurator/MainForm.Designer.cs
generated
Normal file
357
gaseous-configurator/MainForm.Designer.cs
generated
Normal file
|
|
@ -0,0 +1,357 @@
|
|||
namespace gaseous_configurator
|
||||
{
|
||||
partial class MainForm
|
||||
{
|
||||
private System.ComponentModel.IContainer components = null;
|
||||
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
if (disposing && (components != null))
|
||||
{
|
||||
components.Dispose();
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
/// <summary>
/// Designer-generated method: creates and lays out all controls for the form.
/// Do not modify by hand beyond what the WinForms designer would emit.
/// NOTE(review): TabIndex 13 is used by both btnStopService and lblStatus, and 14 by
/// both btnRestartService and lblPath — harmless for labels but worth confirming.
/// </summary>
private void InitializeComponent()
{
    this.lblHost = new System.Windows.Forms.Label();
    this.txtHost = new System.Windows.Forms.TextBox();
    this.lblPort = new System.Windows.Forms.Label();
    this.numPort = new System.Windows.Forms.NumericUpDown();
    this.lblWebPort = new System.Windows.Forms.Label();
    this.numWebPort = new System.Windows.Forms.NumericUpDown();
    this.lblUser = new System.Windows.Forms.Label();
    this.txtUser = new System.Windows.Forms.TextBox();
    this.lblPass = new System.Windows.Forms.Label();
    this.txtPass = new System.Windows.Forms.TextBox();
    this.lblDb = new System.Windows.Forms.Label();
    this.txtDb = new System.Windows.Forms.TextBox();
    this.btnSave = new System.Windows.Forms.Button();
    this.lblStatus = new System.Windows.Forms.Label();
    this.btnStartService = new System.Windows.Forms.Button();
    this.lblPath = new System.Windows.Forms.Label();
    this.btnStopService = new System.Windows.Forms.Button();
    this.btnRestartService = new System.Windows.Forms.Button();
    this.btnRemoveService = new System.Windows.Forms.Button();
    this.btnOpenLogs = new System.Windows.Forms.Button();
    this.btnOpenBrowser = new System.Windows.Forms.Button();
    this.statusStrip = new System.Windows.Forms.StatusStrip();
    this.serviceStatusLabel = new System.Windows.Forms.ToolStripStatusLabel();
    this.actionStatusLabel = new System.Windows.Forms.ToolStripStatusLabel();
    ((System.ComponentModel.ISupportInitialize)(this.numPort)).BeginInit();
    ((System.ComponentModel.ISupportInitialize)(this.numWebPort)).BeginInit();
    this.statusStrip.SuspendLayout();
    this.SuspendLayout();
    //
    // lblHost
    //
    this.lblHost.AutoSize = true;
    this.lblHost.Location = new System.Drawing.Point(12, 45);
    this.lblHost.Name = "lblHost";
    this.lblHost.Size = new System.Drawing.Size(53, 15);
    this.lblHost.TabIndex = 0;
    this.lblHost.Text = "Hostname";
    //
    // txtHost
    //
    this.txtHost.Location = new System.Drawing.Point(120, 42);
    this.txtHost.Name = "txtHost";
    this.txtHost.Size = new System.Drawing.Size(240, 23);
    this.txtHost.TabIndex = 1;
    //
    // lblPort
    //
    this.lblPort.AutoSize = true;
    this.lblPort.Location = new System.Drawing.Point(12, 77);
    this.lblPort.Name = "lblPort";
    this.lblPort.Size = new System.Drawing.Size(29, 15);
    this.lblPort.TabIndex = 2;
    this.lblPort.Text = "Port";
    //
    // numPort (database port, defaults to MySQL's 3306)
    //
    this.numPort.Location = new System.Drawing.Point(120, 75);
    this.numPort.Maximum = new decimal(new int[] {
    65535,
    0,
    0,
    0});
    this.numPort.Minimum = new decimal(new int[] {
    1,
    0,
    0,
    0});
    this.numPort.Name = "numPort";
    this.numPort.Size = new System.Drawing.Size(120, 23);
    this.numPort.TabIndex = 3;
    this.numPort.Value = new decimal(new int[] {
    3306,
    0,
    0,
    0});
    //
    // lblWebPort
    //
    this.lblWebPort.AutoSize = true;
    this.lblWebPort.Location = new System.Drawing.Point(12, 201);
    this.lblWebPort.Name = "lblWebPort";
    this.lblWebPort.Size = new System.Drawing.Size(57, 15);
    this.lblWebPort.TabIndex = 19;
    this.lblWebPort.Text = "Web Port";
    //
    // numWebPort (server HTTP port, defaults to 5198)
    //
    this.numWebPort.Location = new System.Drawing.Point(120, 199);
    this.numWebPort.Maximum = new decimal(new int[] {
    65535,
    0,
    0,
    0});
    this.numWebPort.Minimum = new decimal(new int[] {
    1,
    0,
    0,
    0});
    this.numWebPort.Name = "numWebPort";
    this.numWebPort.Size = new System.Drawing.Size(120, 23);
    this.numWebPort.TabIndex = 20;
    this.numWebPort.Value = new decimal(new int[] {
    5198,
    0,
    0,
    0});
    //
    // lblUser
    //
    this.lblUser.AutoSize = true;
    this.lblUser.Location = new System.Drawing.Point(12, 109);
    this.lblUser.Name = "lblUser";
    this.lblUser.Size = new System.Drawing.Size(63, 15);
    this.lblUser.TabIndex = 4;
    this.lblUser.Text = "User name";
    //
    // txtUser
    //
    this.txtUser.Location = new System.Drawing.Point(120, 106);
    this.txtUser.Name = "txtUser";
    this.txtUser.Size = new System.Drawing.Size(240, 23);
    this.txtUser.TabIndex = 5;
    //
    // lblPass
    //
    this.lblPass.AutoSize = true;
    this.lblPass.Location = new System.Drawing.Point(12, 141);
    this.lblPass.Name = "lblPass";
    this.lblPass.Size = new System.Drawing.Size(57, 15);
    this.lblPass.TabIndex = 6;
    this.lblPass.Text = "Password";
    //
    // txtPass (masked input)
    //
    this.txtPass.Location = new System.Drawing.Point(120, 138);
    this.txtPass.Name = "txtPass";
    this.txtPass.PasswordChar = '•';
    this.txtPass.Size = new System.Drawing.Size(240, 23);
    this.txtPass.TabIndex = 7;
    //
    // lblDb
    //
    this.lblDb.AutoSize = true;
    this.lblDb.Location = new System.Drawing.Point(12, 173);
    this.lblDb.Name = "lblDb";
    this.lblDb.Size = new System.Drawing.Size(90, 15);
    this.lblDb.TabIndex = 8;
    this.lblDb.Text = "Database name";
    //
    // txtDb
    //
    this.txtDb.Location = new System.Drawing.Point(120, 170);
    this.txtDb.Name = "txtDb";
    this.txtDb.Size = new System.Drawing.Size(240, 23);
    this.txtDb.TabIndex = 9;
    //
    // btnSave
    //
    this.btnSave.Location = new System.Drawing.Point(416, 230);
    this.btnSave.Name = "btnSave";
    this.btnSave.Size = new System.Drawing.Size(90, 30);
    this.btnSave.TabIndex = 11;
    this.btnSave.Text = "Save";
    this.btnSave.UseVisualStyleBackColor = true;
    this.btnSave.Click += new System.EventHandler(this.btnSave_Click);
    //
    // lblStatus (inline action feedback; starts empty)
    //
    this.lblStatus.AutoSize = true;
    this.lblStatus.ForeColor = System.Drawing.SystemColors.GrayText;
    this.lblStatus.Location = new System.Drawing.Point(12, 270);
    this.lblStatus.Name = "lblStatus";
    this.lblStatus.Size = new System.Drawing.Size(0, 15);
    this.lblStatus.TabIndex = 13;
    //
    // lblPath (shows the effective config file path; ellipsized if too long)
    //
    this.lblPath.AutoSize = false;
    this.lblPath.AutoEllipsis = true;
    this.lblPath.ForeColor = System.Drawing.SystemColors.GrayText;
    this.lblPath.Location = new System.Drawing.Point(12, 12);
    this.lblPath.Name = "lblPath";
    this.lblPath.Size = new System.Drawing.Size(370, 15);
    this.lblPath.TabIndex = 14;
    //
    // btnStartService
    //
    this.btnStartService.Location = new System.Drawing.Point(204, 230);
    this.btnStartService.Name = "btnStartService";
    this.btnStartService.Size = new System.Drawing.Size(110, 30);
    this.btnStartService.TabIndex = 12;
    this.btnStartService.Text = "Start Service";
    this.btnStartService.UseVisualStyleBackColor = true;
    this.btnStartService.Click += new System.EventHandler(this.btnStartService_Click);
    //
    // btnStopService
    //
    this.btnStopService.Location = new System.Drawing.Point(108, 230);
    this.btnStopService.Name = "btnStopService";
    this.btnStopService.Size = new System.Drawing.Size(90, 30);
    this.btnStopService.TabIndex = 13;
    this.btnStopService.Text = "Stop";
    this.btnStopService.UseVisualStyleBackColor = true;
    this.btnStopService.Click += new System.EventHandler(this.btnStopService_Click);
    //
    // btnRestartService
    //
    this.btnRestartService.Location = new System.Drawing.Point(12, 230);
    this.btnRestartService.Name = "btnRestartService";
    this.btnRestartService.Size = new System.Drawing.Size(90, 30);
    this.btnRestartService.TabIndex = 14;
    this.btnRestartService.Text = "Restart";
    this.btnRestartService.UseVisualStyleBackColor = true;
    this.btnRestartService.Click += new System.EventHandler(this.btnRestartService_Click);
    //
    // btnRemoveService
    //
    this.btnRemoveService.Location = new System.Drawing.Point(320, 230);
    this.btnRemoveService.Name = "btnRemoveService";
    this.btnRemoveService.Size = new System.Drawing.Size(90, 30);
    this.btnRemoveService.TabIndex = 15;
    this.btnRemoveService.Text = "Remove";
    this.btnRemoveService.UseVisualStyleBackColor = true;
    this.btnRemoveService.Click += new System.EventHandler(this.btnRemoveService_Click);
    //
    // btnOpenLogs
    //
    this.btnOpenLogs.Location = new System.Drawing.Point(398, 8);
    this.btnOpenLogs.Name = "btnOpenLogs";
    this.btnOpenLogs.Size = new System.Drawing.Size(110, 26);
    this.btnOpenLogs.TabIndex = 18;
    this.btnOpenLogs.Text = "Open Logs";
    this.btnOpenLogs.UseVisualStyleBackColor = true;
    this.btnOpenLogs.Click += new System.EventHandler(this.btnOpenLogs_Click);
    //
    // btnOpenBrowser
    //
    this.btnOpenBrowser.Location = new System.Drawing.Point(282, 8);
    this.btnOpenBrowser.Name = "btnOpenBrowser";
    this.btnOpenBrowser.Size = new System.Drawing.Size(110, 26);
    this.btnOpenBrowser.TabIndex = 21;
    this.btnOpenBrowser.Text = "Open Web";
    this.btnOpenBrowser.UseVisualStyleBackColor = true;
    this.btnOpenBrowser.Click += new System.EventHandler(this.btnOpenBrowser_Click);
    //
    // statusStrip (hosts the service + action status labels)
    //
    this.statusStrip.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
    this.serviceStatusLabel,
    this.actionStatusLabel});
    this.statusStrip.Location = new System.Drawing.Point(0, 278);
    this.statusStrip.Name = "statusStrip";
    this.statusStrip.Size = new System.Drawing.Size(520, 22);
    this.statusStrip.SizingGrip = false;
    this.statusStrip.TabIndex = 19;
    this.statusStrip.Text = "statusStrip";
    //
    // serviceStatusLabel
    //
    this.serviceStatusLabel.ForeColor = System.Drawing.SystemColors.GrayText;
    this.serviceStatusLabel.Name = "serviceStatusLabel";
    this.serviceStatusLabel.Size = new System.Drawing.Size(0, 17);
    this.serviceStatusLabel.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
    //
    // actionStatusLabel (Spring makes it absorb remaining width, right-aligned)
    //
    this.actionStatusLabel.ForeColor = System.Drawing.SystemColors.GrayText;
    this.actionStatusLabel.Name = "actionStatusLabel";
    this.actionStatusLabel.Size = new System.Drawing.Size(0, 17);
    this.actionStatusLabel.Spring = true;
    this.actionStatusLabel.TextAlign = System.Drawing.ContentAlignment.MiddleRight;
    //
    // MainForm
    //
    this.AutoScaleDimensions = new System.Drawing.SizeF(7F, 15F);
    this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
    this.ClientSize = new System.Drawing.Size(520, 300);
    this.Controls.Add(this.btnOpenBrowser);
    this.Controls.Add(this.statusStrip);
    this.Controls.Add(this.btnOpenLogs);
    this.Controls.Add(this.btnRemoveService);
    this.Controls.Add(this.btnRestartService);
    this.Controls.Add(this.btnStopService);
    this.Controls.Add(this.btnStartService);
    this.Controls.Add(this.lblStatus);
    this.Controls.Add(this.lblPath);
    this.Controls.Add(this.btnSave);
    this.Controls.Add(this.numWebPort);
    this.Controls.Add(this.lblWebPort);
    this.Controls.Add(this.txtDb);
    this.Controls.Add(this.lblDb);
    this.Controls.Add(this.txtPass);
    this.Controls.Add(this.lblPass);
    this.Controls.Add(this.txtUser);
    this.Controls.Add(this.lblUser);
    this.Controls.Add(this.numPort);
    this.Controls.Add(this.lblPort);
    this.Controls.Add(this.txtHost);
    this.Controls.Add(this.lblHost);
    this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
    this.MaximizeBox = false;
    this.Name = "MainForm";
    this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
    this.Text = "Gaseous Configurator";
    this.Load += new System.EventHandler(this.MainForm_Load);
    ((System.ComponentModel.ISupportInitialize)(this.numPort)).EndInit();
    ((System.ComponentModel.ISupportInitialize)(this.numWebPort)).EndInit();
    this.statusStrip.ResumeLayout(false);
    this.statusStrip.PerformLayout();
    this.ResumeLayout(false);
    this.PerformLayout();

}
|
||||
|
||||
// Designer-managed control fields; all instantiated in InitializeComponent.
private System.Windows.Forms.Label lblHost;
private System.Windows.Forms.TextBox txtHost;
private System.Windows.Forms.Label lblPort;
private System.Windows.Forms.NumericUpDown numPort;
private System.Windows.Forms.Label lblUser;
private System.Windows.Forms.TextBox txtUser;
private System.Windows.Forms.Label lblPass;
private System.Windows.Forms.TextBox txtPass;
private System.Windows.Forms.Label lblDb;
private System.Windows.Forms.TextBox txtDb;
private System.Windows.Forms.Button btnSave;
private System.Windows.Forms.Label lblStatus;
private System.Windows.Forms.Button btnStartService;
private System.Windows.Forms.Label lblPath;
private System.Windows.Forms.Button btnStopService;
private System.Windows.Forms.Button btnRestartService;
private System.Windows.Forms.Button btnRemoveService;
private System.Windows.Forms.Button btnOpenLogs;
private System.Windows.Forms.StatusStrip statusStrip;
private System.Windows.Forms.ToolStripStatusLabel serviceStatusLabel;
private System.Windows.Forms.ToolStripStatusLabel actionStatusLabel;
private System.Windows.Forms.Label lblWebPort;
private System.Windows.Forms.NumericUpDown numWebPort;
private System.Windows.Forms.Button btnOpenBrowser;
||||
}
|
||||
}
|
||||
588
gaseous-configurator/MainForm.cs
Normal file
588
gaseous-configurator/MainForm.cs
Normal file
|
|
@ -0,0 +1,588 @@
|
|||
using System;
|
||||
using System.Diagnostics;
|
||||
using System.Drawing;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.ServiceProcess;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Threading.Tasks;
|
||||
using System.Windows.Forms;
|
||||
using gaseous_server.Classes;
|
||||
|
||||
namespace gaseous_configurator
|
||||
{
|
||||
public partial class MainForm : Form
|
||||
{
|
||||
// Polls service status; System.Windows.Forms.Timer raises Tick on the UI thread.
private readonly System.Windows.Forms.Timer _statusTimer = new System.Windows.Forms.Timer();
// Short service name (no spaces) for SCM APIs
private const string ServiceName = "GaseousServer";
// Shared HttpClient for health probes. Redirects are NOT followed so a 3xx response
// can be treated as "host up"; the short timeout keeps the status poll responsive.
private static readonly HttpClient _http = new HttpClient(new HttpClientHandler { AllowAutoRedirect = false })
{
    Timeout = TimeSpan.FromSeconds(2)
};
// Re-entrancy guard: prevents overlapping status refreshes from consecutive timer ticks.
private bool _statusBusy;
|
||||
|
||||
/// <summary>
/// Builds the form and wires the 2-second service-status polling timer.
/// The timer is started later, in MainForm_Load.
/// </summary>
public MainForm()
{
    InitializeComponent();
    _statusTimer.Interval = 2000; // 2s
    // async void lambda is acceptable: Tick is a top-level event handler.
    _statusTimer.Tick += async (s, e) => await RefreshServiceStatusAsync();
}
|
||||
|
||||
/// <summary>
/// Mirrors an action message to both the inline status label (in the given color)
/// and the status-bar action label (always gray).
/// </summary>
private void SetActionStatus(Color color, string message)
{
    lblStatus.Text = message;
    lblStatus.ForeColor = color;
    actionStatusLabel.Text = message;
    actionStatusLabel.ForeColor = SystemColors.GrayText;
}
|
||||
|
||||
/// <summary>
/// Form startup: migrates a legacy service name, shows the effective config path,
/// loads current configuration into the inputs, and starts status polling.
/// </summary>
private void MainForm_Load(object? sender, EventArgs e)
{
    // Migrate legacy service name ("Gaseous Server") to new short name ("GaseousServer").
    // Blocking here is acceptable: runs once at startup and failures are best-effort.
    try { MigrateLegacyServiceIfNeededAsync().GetAwaiter().GetResult(); } catch { }

    // Display effective config path
    lblPath.Text = "Config: " + Config.ConfigurationPath;

    // Load current DB config values. Clamp the port into the NumericUpDown's 1..65535
    // range so an out-of-range value in the config file cannot throw
    // ArgumentOutOfRangeException (matches the guard already used for numWebPort).
    txtHost.Text = Config.DatabaseConfiguration.HostName;
    try { numPort.Value = Math.Clamp(Config.DatabaseConfiguration.Port, 1, 65535); } catch { numPort.Value = 3306; }
    txtUser.Text = Config.DatabaseConfiguration.UserName;
    txtPass.Text = Config.DatabaseConfiguration.Password;
    txtDb.Text = Config.DatabaseConfiguration.DatabaseName;
    // Load server web port
    try { numWebPort.Value = Math.Clamp(Config.ServerPort, 1, 65535); } catch { numWebPort.Value = 5198; }

    // Start status polling
    _ = RefreshServiceStatusAsync();
    _statusTimer.Start();
}
|
||||
|
||||
/// <summary>
/// One-time migration of the Windows service registration from the legacy name
/// "Gaseous Server" to the short SCM-friendly name "GaseousServer".
/// No-op when the old name is absent or the new name already exists.
/// Best-effort throughout: every step swallows failures so startup is never blocked.
/// </summary>
private async Task MigrateLegacyServiceIfNeededAsync()
{
    const string oldName = "Gaseous Server";
    const string newName = ServiceName; // "GaseousServer"
    try
    {
        var services = ServiceController.GetServices();
        bool oldExists = services.Any(s => s.ServiceName.Equals(oldName, StringComparison.OrdinalIgnoreCase));
        bool newExists = services.Any(s => s.ServiceName.Equals(newName, StringComparison.OrdinalIgnoreCase));
        if (!oldExists || newExists) return;

        SetActionStatus(Color.DarkGray, "Migrating service name...");

        // Stop old if running
        try
        {
            using var scOld = new ServiceController(oldName);
            scOld.Refresh();
            if (scOld.Status == ServiceControllerStatus.Running || scOld.Status == ServiceControllerStatus.Paused)
            {
                try { scOld.Stop(); } catch { }
                try { scOld.WaitForStatus(ServiceControllerStatus.Stopped, TimeSpan.FromSeconds(30)); } catch { }
            }
        }
        catch { }

        // Locate gaseous-server.exe; prefer same directory as configurator
        var baseDir = System.IO.Path.GetDirectoryName(Application.ExecutablePath) ?? AppContext.BaseDirectory;
        string exePath = System.IO.Path.Combine(baseDir, "gaseous-server.exe");
        if (!System.IO.File.Exists(exePath))
        {
            // Fall back to common dev build output locations.
            var candidates = new[]
            {
                System.IO.Path.Combine(baseDir, "..", "gaseous-server", "bin", "Release", "net10.0", "gaseous-server.exe"),
                System.IO.Path.Combine(baseDir, "..", "gaseous-server", "bin", "Debug", "net10.0", "gaseous-server.exe")
            };
            exePath = candidates.FirstOrDefault(System.IO.File.Exists) ?? exePath;
        }

        // Create new short-name service if binary is present
        if (System.IO.File.Exists(exePath))
        {
            // sc.exe requires a space AFTER each option's '=' (binPath= "...").
            var scCreate = new System.Diagnostics.ProcessStartInfo("sc.exe", $"create \"{newName}\" binPath= \"{exePath}\" start= auto DisplayName= \"Gaseous Server\"")
            {
                UseShellExecute = false,
                RedirectStandardOutput = true,
                RedirectStandardError = true,
                CreateNoWindow = true
            };
            using (var p2 = System.Diagnostics.Process.Start(scCreate))
            {
                if (p2 != null)
                {
                    await p2.WaitForExitAsync();
                    // ignore non-zero; we'll still attempt to delete old
                }
            }
        }

        // Delete legacy service name
        try
        {
            var del = new System.Diagnostics.ProcessStartInfo("sc.exe", $"delete \"{oldName}\"")
            {
                UseShellExecute = false,
                RedirectStandardOutput = true,
                RedirectStandardError = true,
                CreateNoWindow = true
            };
            using (var p = System.Diagnostics.Process.Start(del))
            {
                if (p != null) await p.WaitForExitAsync();
            }
        }
        catch { }

        SetActionStatus(Color.DarkGreen, "Service name migrated");
    }
    catch { }
}
|
||||
|
||||
/// <summary>
/// Copies the form inputs into the shared configuration and persists it,
/// reporting success or failure in the status labels.
/// </summary>
private void btnSave_Click(object? sender, EventArgs e)
{
    // Trim text inputs except the password, which may legitimately contain whitespace.
    Config.ServerPort = (int)numWebPort.Value;
    Config.DatabaseConfiguration.Port = (int)numPort.Value;
    Config.DatabaseConfiguration.HostName = txtHost.Text.Trim();
    Config.DatabaseConfiguration.UserName = txtUser.Text.Trim();
    Config.DatabaseConfiguration.DatabaseName = txtDb.Text.Trim();
    Config.DatabaseConfiguration.Password = txtPass.Text;

    try
    {
        Config.UpdateConfig();
        SetActionStatus(Color.DarkGreen, "Saved config to " + Config.ConfigurationPath);
    }
    catch (Exception ex)
    {
        SetActionStatus(Color.DarkRed, "Failed to save: " + ex.Message);
    }
}
||||
|
||||
// Start or install the gaseous-server Windows service by referencing an existing executable
|
||||
private async void btnStartService_Click(object? sender, EventArgs e)
|
||||
{
|
||||
ServiceController? sc = null;
|
||||
try
|
||||
{
|
||||
sc = ServiceController.GetServices().FirstOrDefault(s => s.ServiceName.Equals(ServiceName, StringComparison.OrdinalIgnoreCase));
|
||||
if (sc == null)
|
||||
{
|
||||
SetActionStatus(Color.DarkGray, "Installing service...");
|
||||
|
||||
// Locate gaseous-server.exe; prefer the same directory as the configurator
|
||||
var baseDir = System.IO.Path.GetDirectoryName(Application.ExecutablePath) ?? AppContext.BaseDirectory;
|
||||
string exePath = System.IO.Path.Combine(baseDir, "gaseous-server.exe");
|
||||
|
||||
if (!System.IO.File.Exists(exePath))
|
||||
{
|
||||
// Try common dev locations as a convenience
|
||||
var candidates = new[]
|
||||
{
|
||||
System.IO.Path.Combine(baseDir, "..", "gaseous-server", "bin", "Release", "net10.0", "gaseous-server.exe"),
|
||||
System.IO.Path.Combine(baseDir, "..", "gaseous-server", "bin", "Debug", "net10.0", "gaseous-server.exe")
|
||||
};
|
||||
exePath = candidates.FirstOrDefault(System.IO.File.Exists) ?? exePath;
|
||||
}
|
||||
|
||||
if (!System.IO.File.Exists(exePath))
|
||||
{
|
||||
using var ofd = new OpenFileDialog
|
||||
{
|
||||
Title = "Locate gaseous-server.exe",
|
||||
Filter = "gaseous-server.exe|gaseous-server.exe|Executable (*.exe)|*.exe",
|
||||
CheckFileExists = true,
|
||||
Multiselect = false
|
||||
};
|
||||
if (ofd.ShowDialog(this) == DialogResult.OK)
|
||||
{
|
||||
exePath = ofd.FileName;
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new Exception("Server executable not found. Please install gaseous-server and try again.");
|
||||
}
|
||||
}
|
||||
|
||||
// Create the service via sc.exe create referencing the existing executable
|
||||
// Create with short name but friendly display name
|
||||
var scCreate = new System.Diagnostics.ProcessStartInfo("sc.exe", $"create \"{ServiceName}\" binPath= \"{exePath}\" start= auto DisplayName= \"Gaseous Server\"")
|
||||
{
|
||||
UseShellExecute = false,
|
||||
RedirectStandardOutput = true,
|
||||
RedirectStandardError = true,
|
||||
CreateNoWindow = true
|
||||
};
|
||||
using (var p2 = System.Diagnostics.Process.Start(scCreate))
|
||||
{
|
||||
if (p2 == null) throw new Exception("Failed to start service install");
|
||||
await p2.WaitForExitAsync();
|
||||
if (p2.ExitCode != 0)
|
||||
{
|
||||
var err = await p2.StandardError.ReadToEndAsync();
|
||||
throw new Exception("Service install failed: " + err);
|
||||
}
|
||||
}
|
||||
|
||||
// Refresh controller
|
||||
sc = new ServiceController(ServiceName);
|
||||
}
|
||||
|
||||
// Start service if not running
|
||||
sc.Refresh();
|
||||
if (sc.Status == ServiceControllerStatus.Stopped || sc.Status == ServiceControllerStatus.Paused)
|
||||
{
|
||||
SetActionStatus(Color.DarkGray, "Starting service...");
|
||||
sc.Start();
|
||||
sc.WaitForStatus(ServiceControllerStatus.Running, TimeSpan.FromSeconds(30));
|
||||
}
|
||||
|
||||
SetActionStatus(Color.DarkGreen, "Service running");
|
||||
await RefreshServiceStatusAsync();
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
SetActionStatus(Color.DarkRed, "Service action failed: " + ex.Message);
|
||||
}
|
||||
finally
|
||||
{
|
||||
sc?.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Stops the Windows service when it is running or paused, then reports the result
/// and triggers a status refresh.
/// </summary>
private void btnStopService_Click(object? sender, EventArgs e)
{
    try
    {
        using var controller = new ServiceController(ServiceName);
        controller.Refresh();
        bool active = controller.Status is ServiceControllerStatus.Running or ServiceControllerStatus.Paused;
        if (active)
        {
            SetActionStatus(Color.DarkGray, "Stopping service...");
            controller.Stop();
            controller.WaitForStatus(ServiceControllerStatus.Stopped, TimeSpan.FromSeconds(30));
        }
        SetActionStatus(Color.DarkGreen, "Service stopped");
    }
    catch (Exception ex)
    {
        SetActionStatus(Color.DarkRed, "Stop failed: " + ex.Message);
    }
    finally
    {
        // Fire-and-forget: the refresh updates buttons and the status bar shortly after.
        _ = RefreshServiceStatusAsync();
    }
}
|
||||
|
||||
/// <summary>
/// Restarts the Windows service: stops it first when active, then starts it and
/// waits until it reports Running.
/// </summary>
private void btnRestartService_Click(object? sender, EventArgs e)
{
    try
    {
        using var controller = new ServiceController(ServiceName);
        controller.Refresh();
        SetActionStatus(Color.DarkGray, "Restarting service...");
        if (controller.Status is ServiceControllerStatus.Running or ServiceControllerStatus.Paused)
        {
            controller.Stop();
            controller.WaitForStatus(ServiceControllerStatus.Stopped, TimeSpan.FromSeconds(30));
        }
        controller.Start();
        controller.WaitForStatus(ServiceControllerStatus.Running, TimeSpan.FromSeconds(30));
        SetActionStatus(Color.DarkGreen, "Service running");
    }
    catch (Exception ex)
    {
        SetActionStatus(Color.DarkRed, "Restart failed: " + ex.Message);
    }
    finally
    {
        // Fire-and-forget status refresh to resync buttons and the status bar.
        _ = RefreshServiceStatusAsync();
    }
}
|
||||
|
||||
/// <summary>
/// Polls the service's SCM status, enables/disables the action buttons and config
/// inputs accordingly, and writes a status line ("Service: ...") to the status bar.
/// When Running, also tries to discover the listening port and probe the health
/// endpoint. Guarded by _statusBusy so overlapping timer ticks do not re-enter.
/// Any failure (typically: service not installed) falls back to the catch branch.
/// </summary>
private async Task RefreshServiceStatusAsync()
{
    if (_statusBusy) return;
    _statusBusy = true;
    try
    {
        using var sc = new ServiceController(ServiceName);
        sc.Refresh();
        var status = sc.Status;
        string text;

        // Reflect buttons enabled state
        btnStartService.Enabled = status == ServiceControllerStatus.Stopped || status == ServiceControllerStatus.Paused;
        btnStopService.Enabled = status == ServiceControllerStatus.Running || status == ServiceControllerStatus.Paused;
        btnRestartService.Enabled = status == ServiceControllerStatus.Running || status == ServiceControllerStatus.Paused;

        // Settings should be read-only while the service is running. Also lock while transitioning.
        var allowEdit = status == ServiceControllerStatus.Stopped;
        SetConfigInputsEnabled(allowEdit);

        // Status text mapping per requirement
        if (status == ServiceControllerStatus.StartPending)
        {
            text = "Starting";
        }
        else if (status == ServiceControllerStatus.Running)
        {
            // Try to find listening port for the service process
            var pid = GetServiceProcessId(ServiceName);
            int? port = null;
            if (pid.HasValue && pid.Value > 0)
            {
                port = await TryGetListeningPortAsync(pid.Value);
            }

            if (port.HasValue)
            {
                // Probe health endpoint
                var ready = await IsHostReadyAsync(port.Value);
                text = ready ? $"Started - Port {port.Value}" : "Started - waiting for host";
            }
            else
            {
                text = "Started - waiting for host";
            }
        }
        else
        {
            text = status.ToString();
        }

        // Show status in status bar
        serviceStatusLabel.ForeColor = SystemColors.GrayText;
        serviceStatusLabel.Text = "Service: " + text;
    }
    catch
    {
        // Service likely not installed
        btnStartService.Enabled = true;
        btnStopService.Enabled = false;
        btnRestartService.Enabled = false;
        SetConfigInputsEnabled(true);
        serviceStatusLabel.ForeColor = SystemColors.GrayText;
        serviceStatusLabel.Text = "Service: Not installed";
    }
    finally
    {
        _statusBusy = false;
    }
}
|
||||
|
||||
/// <summary>
/// Removes the Windows service: waits for a clean stop first (generous timeout),
/// then deletes the registration via sc.exe. Errors surface in the status labels.
/// </summary>
private async void btnRemoveService_Click(object? sender, EventArgs e)
{
    try
    {
        // Stop first if running
        using (var sc = new ServiceController(ServiceName))
        {
            try
            {
                sc.Refresh();
                if (sc.Status == ServiceControllerStatus.Running || sc.Status == ServiceControllerStatus.Paused)
                {
                    SetActionStatus(Color.DarkGray, "Stopping service...");
                    sc.Stop();
                    // Wait for a clean stop with a generous timeout
                    await WaitForServiceStoppedAsync(TimeSpan.FromMinutes(2));
                }
                else if (sc.Status == ServiceControllerStatus.StopPending || sc.Status == ServiceControllerStatus.StartPending)
                {
                    SetActionStatus(Color.DarkGray, "Waiting for service to stop...");
                    await WaitForServiceStoppedAsync(TimeSpan.FromMinutes(2));
                }
            }
            catch { /* ignore if not installed */ }
        }

        // sc.exe delete
        var psi = new System.Diagnostics.ProcessStartInfo("sc.exe", $"delete \"{ServiceName}\"")
        {
            UseShellExecute = false,
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            CreateNoWindow = true
        };
        using (var p = System.Diagnostics.Process.Start(psi))
        {
            if (p == null) throw new Exception("Failed to start service removal");
            // Await the exit instead of blocking the UI thread with WaitForExit(),
            // consistent with btnStartService_Click's async handling.
            await p.WaitForExitAsync();
            if (p.ExitCode != 0)
            {
                var err = await p.StandardError.ReadToEndAsync();
                throw new Exception("Remove failed: " + err);
            }
        }

        SetActionStatus(Color.DarkGreen, "Service removed");
    }
    catch (Exception ex)
    {
        SetActionStatus(Color.DarkRed, ex.Message);
    }
    finally
    {
        await RefreshServiceStatusAsync();
    }
}
|
||||
|
||||
/// <summary>
/// Opens the server log directory in Explorer, creating it first if absent.
/// </summary>
private void btnOpenLogs_Click(object? sender, EventArgs e)
{
    try
    {
        // Config.LogPath is used as-is; the original single-argument
        // Path.Combine(Config.LogPath) was a no-op and has been removed.
        var logs = Config.LogPath;
        if (!System.IO.Directory.Exists(logs))
        {
            System.IO.Directory.CreateDirectory(logs);
        }
        // ShellExecute with "open" launches the folder in the default file manager.
        System.Diagnostics.Process.Start(new System.Diagnostics.ProcessStartInfo
        {
            FileName = logs,
            UseShellExecute = true,
            Verb = "open"
        });
    }
    catch (Exception ex)
    {
        SetActionStatus(Color.DarkRed, "Open logs failed: " + ex.Message);
    }
}
|
||||
|
||||
/// <summary>
/// Toggles editability of the configuration inputs: text boxes become read-only
/// (keeping their text selectable/copyable) while numeric inputs and Save are
/// disabled outright.
/// </summary>
private void SetConfigInputsEnabled(bool enabled)
{
    bool locked = !enabled;
    txtHost.ReadOnly = locked;
    txtUser.ReadOnly = locked;
    txtPass.ReadOnly = locked;
    txtDb.ReadOnly = locked;
    numPort.Enabled = enabled;
    numWebPort.Enabled = enabled;
    btnSave.Enabled = enabled;
}
|
||||
|
||||
/// <summary>
/// Opens the default browser at http://localhost:{web port}/ using the port
/// currently shown in the form (not necessarily the saved one).
/// </summary>
private void btnOpenBrowser_Click(object? sender, EventArgs e)
{
    try
    {
        int port = (int)numWebPort.Value;
        string url = $"http://localhost:{port}/";
        var launch = new ProcessStartInfo
        {
            FileName = url,
            UseShellExecute = true // required so the URL opens via the shell handler
        };
        Process.Start(launch);
        SetActionStatus(Color.Gray, $"Launching {url}");
    }
    catch (Exception ex)
    {
        SetActionStatus(Color.DarkRed, "Open web failed: " + ex.Message);
    }
}
|
||||
|
||||
/// <summary>
/// Probes the server's health endpoint on localhost at the given port.
/// Treats any 2xx or 3xx response as "ready" (some setups redirect HTTP to HTTPS);
/// connection errors and timeouts report not ready.
/// </summary>
private async Task<bool> IsHostReadyAsync(int port)
{
    try
    {
        using var request = new HttpRequestMessage(HttpMethod.Get, $"http://localhost:{port}/api/v1.1/HealthCheck");
        using var response = await _http.SendAsync(request);
        int statusCode = (int)response.StatusCode;
        return statusCode is >= 200 and < 400;
    }
    catch
    {
        return false;
    }
}
|
||||
|
||||
/// <summary>
/// Parses `netstat -ano -p tcp` output to find a TCP port on which the given PID
/// is LISTENING. Prefers 5000, then 5001, otherwise the lowest listening port;
/// returns null when nothing is found or parsing fails.
/// </summary>
private async Task<int?> TryGetListeningPortAsync(int pid)
{
    try
    {
        var psi = new ProcessStartInfo("netstat.exe", "-ano -p tcp")
        {
            UseShellExecute = false,
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            CreateNoWindow = true
        };
        using var netstat = Process.Start(psi);
        if (netstat == null) return null;
        string output = await netstat.StandardOutput.ReadToEndAsync();
        await netstat.WaitForExitAsync();

        // Matches lines like: "  TCP  0.0.0.0:5198  0.0.0.0:0  LISTENING  1234"
        // Group 2 = local port, group 5 = owning PID.
        var re = new Regex("^\\s*TCP\\s+(\\S+):(\\d+)\\s+(\\S+):(\\*|\\d+)\\s+LISTENING\\s+(\\d+)\\s*$", RegexOptions.Compiled | RegexOptions.IgnoreCase);
        var ports = output
            .Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries)
            .Select(line => re.Match(line))
            .Where(m => m.Success && int.TryParse(m.Groups[5].Value, out var owner) && owner == pid)
            .Select(m => int.Parse(m.Groups[2].Value))
            .ToArray();
        if (ports.Length == 0) return null;

        if (ports.Contains(5000)) return 5000;
        if (ports.Contains(5001)) return 5001;
        return ports.Min();
    }
    catch
    {
        return null;
    }
}
|
||||
|
||||
/// <summary>
/// Polls the SCM once per second until the service reports Stopped, the timeout
/// elapses, or querying the service throws (e.g. it has been deleted) — the latter
/// is treated as "already stopped".
/// </summary>
private async Task WaitForServiceStoppedAsync(TimeSpan timeout)
{
    var deadline = DateTime.UtcNow + timeout;
    while (DateTime.UtcNow < deadline)
    {
        try
        {
            using var controller = new ServiceController(ServiceName);
            controller.Refresh();
            if (controller.Status == ServiceControllerStatus.Stopped)
            {
                return;
            }
        }
        catch
        {
            // Service might be gone already
            return;
        }
        await Task.Delay(1000);
    }
}
|
||||
|
||||
/// <summary>
/// Queries `sc.exe queryex` for the named service and extracts the process id
/// from the "PID : nnnn" line. Returns null when the service is absent, not
/// running, or the output cannot be parsed.
/// </summary>
private int? GetServiceProcessId(string name)
{
    try
    {
        // Use SC to query the service extended info and parse PID
        var psi = new ProcessStartInfo("sc.exe", $"queryex \"{name}\"")
        {
            UseShellExecute = false,
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            CreateNoWindow = true
        };
        using var query = Process.Start(psi);
        if (query == null) return null;
        string text = query.StandardOutput.ReadToEnd();
        query.WaitForExit();
        // Look for a line like: PID : 1234
        var match = Regex.Match(text, @"PID\s*:\s*(\d+)", RegexOptions.IgnoreCase);
        if (match.Success && int.TryParse(match.Groups[1].Value, out var pid))
        {
            return pid;
        }
    }
    catch
    {
        // ignore
    }
    return null;
}
|
||||
}
|
||||
}
|
||||
113
gaseous-configurator/Program.cs
Normal file
113
gaseous-configurator/Program.cs
Normal file
|
|
@ -0,0 +1,113 @@
|
|||
using System;
|
||||
using System.Diagnostics;
|
||||
using System.Security.Principal;
|
||||
using System.Threading;
|
||||
using System.Windows.Forms;
|
||||
|
||||
namespace gaseous_configurator
|
||||
{
|
||||
internal static class Program
|
||||
{
|
||||
private static Mutex? _singleInstanceMutex;
|
||||
|
||||
private static bool IsAdministrator()
|
||||
{
|
||||
try
|
||||
{
|
||||
var wi = WindowsIdentity.GetCurrent();
|
||||
var wp = new WindowsPrincipal(wi);
|
||||
return wp.IsInRole(WindowsBuiltInRole.Administrator);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
[STAThread]
|
||||
static void Main()
|
||||
{
|
||||
// Elevate if not running as admin
|
||||
if (OperatingSystem.IsWindows() && !IsAdministrator())
|
||||
{
|
||||
try
|
||||
{
|
||||
var exe = Application.ExecutablePath;
|
||||
var psi = new ProcessStartInfo(exe)
|
||||
{
|
||||
UseShellExecute = true,
|
||||
Verb = "runas"
|
||||
};
|
||||
Process.Start(psi);
|
||||
}
|
||||
catch
|
||||
{
|
||||
// user cancelled UAC or failed; exit silently
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Ensure both configurator and service use a shared config path under ProgramData
|
||||
EnsureSharedConfigPath();
|
||||
|
||||
// Single instance guard
|
||||
bool createdNew;
|
||||
_singleInstanceMutex = new Mutex(initiallyOwned: true, name: "Global\\GaseousConfigurator_SingleInstance", createdNew: out createdNew);
|
||||
if (!createdNew)
|
||||
{
|
||||
// Another instance is running; exit quietly
|
||||
return;
|
||||
}
|
||||
|
||||
ApplicationConfiguration.Initialize();
|
||||
try
|
||||
{
|
||||
Application.Run(new MainForm());
|
||||
}
|
||||
finally
|
||||
{
|
||||
// Release single-instance mutex on exit
|
||||
_singleInstanceMutex?.ReleaseMutex();
|
||||
_singleInstanceMutex?.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
private static void EnsureSharedConfigPath()
|
||||
{
|
||||
try
|
||||
{
|
||||
var sharedPath = System.IO.Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), "gaseous-server");
|
||||
|
||||
// Set for current process immediately so Config uses it this run
|
||||
Environment.SetEnvironmentVariable("GASEOUS_CONFIG_PATH", sharedPath, EnvironmentVariableTarget.Process);
|
||||
// Persist for the machine so the Windows service picks it up
|
||||
Environment.SetEnvironmentVariable("GASEOUS_CONFIG_PATH", sharedPath, EnvironmentVariableTarget.Machine);
|
||||
|
||||
if (!System.IO.Directory.Exists(sharedPath))
|
||||
{
|
||||
System.IO.Directory.CreateDirectory(sharedPath);
|
||||
}
|
||||
|
||||
// Best-effort migration from prior per-user location if present and shared is missing
|
||||
var userPath = System.IO.Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".gaseous-server");
|
||||
var userConfig = System.IO.Path.Combine(userPath, "config.json");
|
||||
var userPlatformMap = System.IO.Path.Combine(userPath, "platformmap.json");
|
||||
var sharedConfig = System.IO.Path.Combine(sharedPath, "config.json");
|
||||
var sharedPlatformMap = System.IO.Path.Combine(sharedPath, "platformmap.json");
|
||||
|
||||
if (System.IO.File.Exists(userConfig) && !System.IO.File.Exists(sharedConfig))
|
||||
{
|
||||
System.IO.File.Copy(userConfig, sharedConfig, overwrite: false);
|
||||
}
|
||||
if (System.IO.File.Exists(userPlatformMap) && !System.IO.File.Exists(sharedPlatformMap))
|
||||
{
|
||||
System.IO.File.Copy(userPlatformMap, sharedPlatformMap, overwrite: false);
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Non-fatal: if setting env var fails, Config will fall back to per-user path
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
16
gaseous-configurator/gaseous-configurator.csproj
Normal file
16
gaseous-configurator/gaseous-configurator.csproj
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
<Project Sdk="Microsoft.NET.Sdk.WindowsDesktop">
|
||||
<PropertyGroup>
|
||||
<OutputType>WinExe</OutputType>
|
||||
<TargetFramework>net10.0-windows</TargetFramework>
|
||||
<UseWindowsForms>true</UseWindowsForms>
|
||||
<EnableWindowsTargeting>true</EnableWindowsTargeting>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<ApplicationIcon>..\gaseous-server\wwwroot\favicon.ico</ApplicationIcon>
|
||||
<!-- Allow RID-specific publishes (e.g., win-x64) when invoked from gaseous-server publish -->
|
||||
<RuntimeIdentifiers>win-x64;win-arm64</RuntimeIdentifiers>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../gaseous-lib/gaseous-lib.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
|
@ -12,6 +12,6 @@ namespace Authentication
|
|||
{
|
||||
public SecurityProfileViewModel SecurityProfile { get; set; }
|
||||
public List<UserPreferenceViewModel> UserPreferences { get; set; }
|
||||
public Guid Avatar { get; set; }
|
||||
public Guid ProfileId { get; set; }
|
||||
}
|
||||
}
|
||||
|
|
@ -9,7 +9,7 @@ namespace Authentication
|
|||
/// <summary>
|
||||
/// Class that represents the Role table in the MySQL Database
|
||||
/// </summary>
|
||||
public class RoleTable
|
||||
public class RoleTable
|
||||
{
|
||||
private Database _database;
|
||||
|
||||
|
|
@ -63,7 +63,7 @@ namespace Authentication
|
|||
parameters.Add("@id", roleId);
|
||||
|
||||
DataTable table = _database.ExecuteCMD(commandText, parameters);
|
||||
|
||||
|
||||
if (table.Rows.Count == 0)
|
||||
{
|
||||
return null;
|
||||
|
|
@ -104,7 +104,7 @@ namespace Authentication
|
|||
var roleName = GetRoleName(roleId);
|
||||
ApplicationRole? role = null;
|
||||
|
||||
if(roleName != null)
|
||||
if (roleName != null)
|
||||
{
|
||||
role = new ApplicationRole();
|
||||
role.Id = roleId;
|
||||
|
|
@ -153,7 +153,7 @@ namespace Authentication
|
|||
string commandText = "Select Name from Roles";
|
||||
|
||||
var rows = _database.ExecuteCMDDict(commandText);
|
||||
foreach(Dictionary<string, object> row in rows)
|
||||
foreach (Dictionary<string, object> row in rows)
|
||||
{
|
||||
ApplicationRole role = (ApplicationRole)Activator.CreateInstance(typeof(ApplicationRole));
|
||||
role.Id = (string)row["Id"];
|
||||
|
|
@ -0,0 +1,51 @@
|
|||
using gaseous_server.Classes;
|
||||
using System.Collections.Generic;
|
||||
using System.Data;
|
||||
|
||||
namespace Authentication
|
||||
{
|
||||
/// <summary>
|
||||
/// Access to UserAuthenticatorKeys table (MariaDB/MySQL)
|
||||
/// </summary>
|
||||
public class UserAuthenticatorKeysTable
|
||||
{
|
||||
private readonly Database _database;
|
||||
|
||||
public UserAuthenticatorKeysTable(Database database)
|
||||
{
|
||||
_database = database;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get the authenticator key for a user (null if none).
|
||||
/// </summary>
|
||||
public string? GetKey(string userId)
|
||||
{
|
||||
const string sql = "SELECT AuthenticatorKey FROM UserAuthenticatorKeys WHERE UserId=@uid";
|
||||
var dict = new Dictionary<string, object> { { "uid", userId } };
|
||||
DataTable dt = _database.ExecuteCMD(sql, dict);
|
||||
if (dt.Rows.Count == 0) return null;
|
||||
return (string)dt.Rows[0][0];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Upsert the authenticator key for a user.
|
||||
/// </summary>
|
||||
public void SetKey(string userId, string key)
|
||||
{
|
||||
const string sql = "REPLACE INTO UserAuthenticatorKeys (UserId, AuthenticatorKey) VALUES (@uid, @key)";
|
||||
var dict = new Dictionary<string, object> { { "uid", userId }, { "key", key } };
|
||||
_database.ExecuteNonQuery(sql, dict);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Remove the authenticator key for a user.
|
||||
/// </summary>
|
||||
public void DeleteKey(string userId)
|
||||
{
|
||||
const string sql = "DELETE FROM UserAuthenticatorKeys WHERE UserId=@uid";
|
||||
var dict = new Dictionary<string, object> { { "uid", userId } };
|
||||
_database.ExecuteNonQuery(sql, dict);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -82,7 +82,7 @@ namespace Authentication
|
|||
parameters.Add("providerKey", userLogin.ProviderKey);
|
||||
|
||||
DataTable table = _database.ExecuteCMD(commandText, parameters);
|
||||
|
||||
|
||||
if (table.Rows.Count == 0)
|
||||
{
|
||||
return null;
|
||||
71
gaseous-lib/Classes/Auth/Classes/UserRecoveryCodesTable.cs
Normal file
71
gaseous-lib/Classes/Auth/Classes/UserRecoveryCodesTable.cs
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
using gaseous_server.Classes;
|
||||
using System.Collections.Generic;
|
||||
using System.Data;
|
||||
|
||||
namespace Authentication
|
||||
{
|
||||
/// <summary>
|
||||
/// Access to UserRecoveryCodes table (MariaDB/MySQL)
|
||||
/// </summary>
|
||||
public class UserRecoveryCodesTable
|
||||
{
|
||||
private readonly Database _database;
|
||||
|
||||
/// <summary>
|
||||
/// Ctor with database dependency.
|
||||
/// </summary>
|
||||
public UserRecoveryCodesTable(Database database)
|
||||
{
|
||||
_database = database;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Count codes for a user.
|
||||
/// </summary>
|
||||
public int CountCodes(string userId)
|
||||
{
|
||||
const string sql = "SELECT COUNT(*) FROM UserRecoveryCodes WHERE UserId=@uid";
|
||||
var dict = new Dictionary<string, object> { { "uid", userId } };
|
||||
DataTable dt = _database.ExecuteCMD(sql, dict);
|
||||
if (dt.Rows.Count == 0) return 0;
|
||||
var val = dt.Rows[0][0]?.ToString();
|
||||
return int.TryParse(val, out var n) ? n : 0;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Delete a matching code and return true if removed.
|
||||
/// </summary>
|
||||
public bool RedeemCode(string userId, string codeHash)
|
||||
{
|
||||
const string delSql = "DELETE FROM UserRecoveryCodes WHERE UserId=@uid AND CodeHash=@code";
|
||||
var dict = new Dictionary<string, object> { { "uid", userId }, { "code", codeHash } };
|
||||
var affected = _database.ExecuteNonQuery(delSql, dict);
|
||||
return affected > 0;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Replace all codes for a user with a new set (hashed).
|
||||
/// </summary>
|
||||
public void ReplaceCodes(string userId, IEnumerable<string> codeHashes)
|
||||
{
|
||||
// Execute delete + inserts atomically to avoid partial state.
|
||||
var txItems = new List<Database.SQLTransactionItem>();
|
||||
|
||||
// clear existing
|
||||
txItems.Add(new Database.SQLTransactionItem(
|
||||
"DELETE FROM UserRecoveryCodes WHERE UserId=@uid",
|
||||
new Dictionary<string, object> { { "uid", userId } }
|
||||
));
|
||||
|
||||
foreach (var code in codeHashes)
|
||||
{
|
||||
txItems.Add(new Database.SQLTransactionItem(
|
||||
"INSERT INTO UserRecoveryCodes (UserId, CodeHash) VALUES (@uid, @code)",
|
||||
new Dictionary<string, object> { { "uid", userId }, { "code", code } }
|
||||
));
|
||||
}
|
||||
|
||||
_database.ExecuteTransactionCMD(txItems);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -35,7 +35,7 @@ namespace Authentication
|
|||
parameters.Add("@userId", userId);
|
||||
|
||||
var rows = _database.ExecuteCMD(commandText, parameters).Rows;
|
||||
foreach(DataRow row in rows)
|
||||
foreach (DataRow row in rows)
|
||||
{
|
||||
roles.Add((string)row["Name"]);
|
||||
}
|
||||
|
|
@ -7,7 +7,7 @@ using MySqlConnector;
|
|||
|
||||
namespace Authentication
|
||||
{
|
||||
public class UserStore :
|
||||
public class UserStore :
|
||||
IUserStore<ApplicationUser>,
|
||||
IUserRoleStore<ApplicationUser>,
|
||||
IUserLoginStore<ApplicationUser>,
|
||||
|
|
@ -18,7 +18,9 @@ namespace Authentication
|
|||
IUserEmailStore<ApplicationUser>,
|
||||
IUserPhoneNumberStore<ApplicationUser>,
|
||||
IUserTwoFactorStore<ApplicationUser>,
|
||||
IUserLockoutStore<ApplicationUser>
|
||||
IUserLockoutStore<ApplicationUser>,
|
||||
IUserAuthenticatorKeyStore<ApplicationUser>,
|
||||
IUserTwoFactorRecoveryCodeStore<ApplicationUser>
|
||||
{
|
||||
private Database database;
|
||||
|
||||
|
|
@ -27,6 +29,8 @@ namespace Authentication
|
|||
private UserRolesTable userRolesTable;
|
||||
private UserLoginsTable userLoginsTable;
|
||||
private UserClaimsTable userClaimsTable;
|
||||
private UserAuthenticatorKeysTable userAuthenticatorKeysTable;
|
||||
private UserRecoveryCodesTable userRecoveryCodesTable;
|
||||
|
||||
public UserStore()
|
||||
{
|
||||
|
|
@ -36,6 +40,8 @@ namespace Authentication
|
|||
userRolesTable = new UserRolesTable(database);
|
||||
userLoginsTable = new UserLoginsTable(database);
|
||||
userClaimsTable = new UserClaimsTable(database);
|
||||
userAuthenticatorKeysTable = new UserAuthenticatorKeysTable(database);
|
||||
userRecoveryCodesTable = new UserRecoveryCodesTable(database);
|
||||
}
|
||||
|
||||
public UserStore(Database database)
|
||||
|
|
@ -46,6 +52,8 @@ namespace Authentication
|
|||
userRolesTable = new UserRolesTable(database);
|
||||
userLoginsTable = new UserLoginsTable(database);
|
||||
userClaimsTable = new UserClaimsTable(database);
|
||||
userAuthenticatorKeysTable = new UserAuthenticatorKeysTable(database);
|
||||
userRecoveryCodesTable = new UserRecoveryCodesTable(database);
|
||||
}
|
||||
|
||||
public IQueryable<ApplicationUser> Users
|
||||
|
|
@ -206,7 +214,7 @@ namespace Authentication
|
|||
throw new ArgumentException("Null or empty argument: normalizedUserName");
|
||||
}
|
||||
|
||||
List<ApplicationUser> result = userTable.GetUserByName(normalizedUserName) as List<ApplicationUser>;
|
||||
List<ApplicationUser> result = userTable.GetUserByName(normalizedUserName, false) as List<ApplicationUser>;
|
||||
|
||||
// Should I throw if > 1 user?
|
||||
if (result != null && result.Count == 1)
|
||||
|
|
@ -612,5 +620,42 @@ namespace Authentication
|
|||
|
||||
return Task.FromResult<IdentityResult>(IdentityResult.Success);
|
||||
}
|
||||
|
||||
public Task<string?> GetAuthenticatorKeyAsync(ApplicationUser user, CancellationToken cancellationToken)
|
||||
{
|
||||
if (user == null) throw new ArgumentNullException(nameof(user));
|
||||
return Task.FromResult(userAuthenticatorKeysTable.GetKey(user.Id));
|
||||
}
|
||||
|
||||
public Task SetAuthenticatorKeyAsync(ApplicationUser user, string key, CancellationToken cancellationToken)
|
||||
{
|
||||
if (user == null) throw new ArgumentNullException(nameof(user));
|
||||
userAuthenticatorKeysTable.SetKey(user.Id, key);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task ReplaceCodesAsync(ApplicationUser user, IEnumerable<string> recoveryCodes, CancellationToken cancellationToken)
|
||||
{
|
||||
if (user == null) throw new ArgumentNullException(nameof(user));
|
||||
if (recoveryCodes == null) throw new ArgumentNullException(nameof(recoveryCodes));
|
||||
// Store hashed codes; Identity passes hashed strings here.
|
||||
userRecoveryCodesTable.ReplaceCodes(user.Id, recoveryCodes);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task<bool> RedeemCodeAsync(ApplicationUser user, string code, CancellationToken cancellationToken)
|
||||
{
|
||||
if (user == null) throw new ArgumentNullException(nameof(user));
|
||||
if (code == null) throw new ArgumentNullException(nameof(code));
|
||||
bool ok = userRecoveryCodesTable.RedeemCode(user.Id, code);
|
||||
return Task.FromResult(ok);
|
||||
}
|
||||
|
||||
public Task<int> CountCodesAsync(ApplicationUser user, CancellationToken cancellationToken)
|
||||
{
|
||||
if (user == null) throw new ArgumentNullException(nameof(user));
|
||||
int count = userRecoveryCodesTable.CountCodes(user.Id);
|
||||
return Task.FromResult(count);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -10,7 +10,7 @@ namespace Authentication
|
|||
/// Class that represents the Users table in the MySQL Database
|
||||
/// </summary>
|
||||
public class UserTable<TUser>
|
||||
where TUser :ApplicationUser
|
||||
where TUser : ApplicationUser
|
||||
{
|
||||
private Database _database;
|
||||
|
||||
|
|
@ -34,7 +34,7 @@ namespace Authentication
|
|||
Dictionary<string, object> parameters = new Dictionary<string, object>() { { "@id", userId } };
|
||||
|
||||
DataTable table = _database.ExecuteCMD(commandText, parameters);
|
||||
|
||||
|
||||
if (table.Rows.Count == 0)
|
||||
{
|
||||
return null;
|
||||
|
|
@ -75,7 +75,7 @@ namespace Authentication
|
|||
public TUser GetUserById(string userId)
|
||||
{
|
||||
TUser user = null;
|
||||
string commandText = "Select * from Users LEFT JOIN (SELECT UserId, Id AS AvatarId FROM UserAvatars) UserAvatars ON Users.Id = UserAvatars.UserId where Id = @id";
|
||||
string commandText = "Select * from Users LEFT JOIN (SELECT Id As ProfileId, UserId FROM UserProfiles) UserProfiles ON Users.Id = UserProfiles.UserId where Id = @id";
|
||||
Dictionary<string, object> parameters = new Dictionary<string, object>() { { "@id", userId } };
|
||||
|
||||
var rows = _database.ExecuteCMDDict(commandText, parameters);
|
||||
|
|
@ -89,7 +89,7 @@ namespace Authentication
|
|||
user.SecurityStamp = (string?)(string.IsNullOrEmpty((string?)row["SecurityStamp"]) ? null : row["SecurityStamp"]);
|
||||
user.ConcurrencyStamp = (string?)(string.IsNullOrEmpty((string?)row["ConcurrencyStamp"]) ? null : row["ConcurrencyStamp"]);
|
||||
user.Email = (string?)(string.IsNullOrEmpty((string?)row["Email"]) ? null : row["Email"]);
|
||||
user.EmailConfirmed = row["EmailConfirmed"] == "1" ? true:false;
|
||||
user.EmailConfirmed = row["EmailConfirmed"] == "1" ? true : false;
|
||||
user.PhoneNumber = (string?)(string.IsNullOrEmpty((string?)row["PhoneNumber"]) ? null : row["PhoneNumber"]);
|
||||
user.PhoneNumberConfirmed = row["PhoneNumberConfirmed"] == "1" ? true : false;
|
||||
user.NormalizedEmail = (string?)(string.IsNullOrEmpty((string?)row["NormalizedEmail"]) ? null : row["NormalizedEmail"]);
|
||||
|
|
@ -97,10 +97,12 @@ namespace Authentication
|
|||
user.LockoutEnabled = row["LockoutEnabled"] == "1" ? true : false;
|
||||
user.LockoutEnd = string.IsNullOrEmpty((string?)row["LockoutEnd"]) ? DateTime.Now : DateTime.Parse((string?)row["LockoutEnd"]);
|
||||
user.AccessFailedCount = string.IsNullOrEmpty((string?)row["AccessFailedCount"]) ? 0 : int.Parse((string?)row["AccessFailedCount"]);
|
||||
user.TwoFactorEnabled = row["TwoFactorEnabled"] == "1" ? true:false;
|
||||
// Handle both "1"/"0" and "true"/"false" (case-insensitive) for TwoFactorEnabled
|
||||
var twoFactorValue = row["TwoFactorEnabled"]?.ToString()?.ToLowerInvariant();
|
||||
user.TwoFactorEnabled = twoFactorValue == "1" || twoFactorValue == "true";
|
||||
user.SecurityProfile = GetSecurityProfile(user);
|
||||
user.UserPreferences = GetPreferences(user);
|
||||
user.Avatar = string.IsNullOrEmpty((string?)row["AvatarId"]) ? Guid.Empty : Guid.Parse((string?)row["AvatarId"]);
|
||||
user.ProfileId = string.IsNullOrEmpty((string?)row["ProfileId"]) ? Guid.Empty : Guid.Parse((string?)row["ProfileId"]);
|
||||
}
|
||||
|
||||
return user;
|
||||
|
|
@ -111,14 +113,18 @@ namespace Authentication
|
|||
/// </summary>
|
||||
/// <param name="normalizedUserName">User's name</param>
|
||||
/// <returns></returns>
|
||||
public List<TUser> GetUserByName(string normalizedUserName)
|
||||
public List<TUser> GetUserByName(string normalizedUserName, bool searchAsEmail)
|
||||
{
|
||||
List<TUser> users = new List<TUser>();
|
||||
string commandText = "Select * from Users LEFT JOIN (SELECT UserId, Id AS AvatarId FROM UserAvatars) UserAvatars ON Users.Id = UserAvatars.UserId where NormalizedEmail = @name";
|
||||
string commandText = "Select * from Users LEFT JOIN (SELECT Id As ProfileId, UserId FROM UserProfiles) UserProfiles ON Users.Id = UserProfiles.UserId where NormalizedEmail = @name";
|
||||
if (!searchAsEmail)
|
||||
{
|
||||
commandText = "Select * from Users LEFT JOIN (SELECT Id As ProfileId, UserId FROM UserProfiles) UserProfiles ON Users.Id = UserProfiles.UserId where NormalizedUserName = @name";
|
||||
}
|
||||
Dictionary<string, object> parameters = new Dictionary<string, object>() { { "@name", normalizedUserName } };
|
||||
|
||||
var rows = _database.ExecuteCMDDict(commandText, parameters);
|
||||
foreach(Dictionary<string, object> row in rows)
|
||||
foreach (Dictionary<string, object> row in rows)
|
||||
{
|
||||
TUser user = (TUser)Activator.CreateInstance(typeof(TUser));
|
||||
user.Id = (string)row["Id"];
|
||||
|
|
@ -127,7 +133,7 @@ namespace Authentication
|
|||
user.SecurityStamp = (string?)(string.IsNullOrEmpty((string?)row["SecurityStamp"]) ? null : row["SecurityStamp"]);
|
||||
user.ConcurrencyStamp = (string?)(string.IsNullOrEmpty((string?)row["ConcurrencyStamp"]) ? null : row["ConcurrencyStamp"]);
|
||||
user.Email = (string?)(string.IsNullOrEmpty((string?)row["Email"]) ? null : row["Email"]);
|
||||
user.EmailConfirmed = row["EmailConfirmed"] == "1" ? true:false;
|
||||
user.EmailConfirmed = row["EmailConfirmed"] == "1" ? true : false;
|
||||
user.PhoneNumber = (string?)(string.IsNullOrEmpty((string?)row["PhoneNumber"]) ? null : row["PhoneNumber"]);
|
||||
user.PhoneNumberConfirmed = row["PhoneNumberConfirmed"] == "1" ? true : false;
|
||||
user.NormalizedEmail = (string?)(string.IsNullOrEmpty((string?)row["NormalizedEmail"]) ? null : row["NormalizedEmail"]);
|
||||
|
|
@ -135,10 +141,12 @@ namespace Authentication
|
|||
user.LockoutEnabled = row["LockoutEnabled"] == "1" ? true : false;
|
||||
user.LockoutEnd = string.IsNullOrEmpty((string?)row["LockoutEnd"]) ? DateTime.Now : DateTime.Parse((string?)row["LockoutEnd"]);
|
||||
user.AccessFailedCount = string.IsNullOrEmpty((string?)row["AccessFailedCount"]) ? 0 : int.Parse((string?)row["AccessFailedCount"]);
|
||||
user.TwoFactorEnabled = row["TwoFactorEnabled"] == "1" ? true:false;
|
||||
// Handle both "1"/"0" and "true"/"false" (case-insensitive) for TwoFactorEnabled
|
||||
var twoFactorValue = row["TwoFactorEnabled"]?.ToString()?.ToLowerInvariant();
|
||||
user.TwoFactorEnabled = twoFactorValue == "1" || twoFactorValue == "true";
|
||||
user.SecurityProfile = GetSecurityProfile(user);
|
||||
user.UserPreferences = GetPreferences(user);
|
||||
user.Avatar = string.IsNullOrEmpty((string?)row["AvatarId"]) ? Guid.Empty : Guid.Parse((string?)row["AvatarId"]);
|
||||
user.ProfileId = string.IsNullOrEmpty((string?)row["ProfileId"]) ? Guid.Empty : Guid.Parse((string?)row["ProfileId"]);
|
||||
users.Add(user);
|
||||
}
|
||||
|
||||
|
|
@ -148,10 +156,10 @@ namespace Authentication
|
|||
public List<TUser> GetUsers()
|
||||
{
|
||||
List<TUser> users = new List<TUser>();
|
||||
string commandText = "Select * from Users LEFT JOIN (SELECT UserId, Id AS AvatarId FROM UserAvatars) UserAvatars ON Users.Id = UserAvatars.UserId order by NormalizedUserName";
|
||||
|
||||
string commandText = "Select * from Users LEFT JOIN (SELECT Id As ProfileId, UserId FROM UserProfiles) UserProfiles ON Users.Id = UserProfiles.UserId order by NormalizedUserName";
|
||||
|
||||
var rows = _database.ExecuteCMDDict(commandText);
|
||||
foreach(Dictionary<string, object> row in rows)
|
||||
foreach (Dictionary<string, object> row in rows)
|
||||
{
|
||||
TUser user = (TUser)Activator.CreateInstance(typeof(TUser));
|
||||
user.Id = (string)row["Id"];
|
||||
|
|
@ -160,7 +168,7 @@ namespace Authentication
|
|||
user.SecurityStamp = (string?)(string.IsNullOrEmpty((string?)row["SecurityStamp"]) ? null : row["SecurityStamp"]);
|
||||
user.ConcurrencyStamp = (string?)(string.IsNullOrEmpty((string?)row["ConcurrencyStamp"]) ? null : row["ConcurrencyStamp"]);
|
||||
user.Email = (string?)(string.IsNullOrEmpty((string?)row["Email"]) ? null : row["Email"]);
|
||||
user.EmailConfirmed = row["EmailConfirmed"] == "1" ? true:false;
|
||||
user.EmailConfirmed = row["EmailConfirmed"] == "1" ? true : false;
|
||||
user.PhoneNumber = (string?)(string.IsNullOrEmpty((string?)row["PhoneNumber"]) ? null : row["PhoneNumber"]);
|
||||
user.PhoneNumberConfirmed = row["PhoneNumberConfirmed"] == "1" ? true : false;
|
||||
user.NormalizedEmail = (string?)(string.IsNullOrEmpty((string?)row["NormalizedEmail"]) ? null : row["NormalizedEmail"]);
|
||||
|
|
@ -168,10 +176,12 @@ namespace Authentication
|
|||
user.LockoutEnabled = row["LockoutEnabled"] == "1" ? true : false;
|
||||
user.LockoutEnd = string.IsNullOrEmpty((string?)row["LockoutEnd"]) ? DateTime.Now : DateTime.Parse((string?)row["LockoutEnd"]);
|
||||
user.AccessFailedCount = string.IsNullOrEmpty((string?)row["AccessFailedCount"]) ? 0 : int.Parse((string?)row["AccessFailedCount"]);
|
||||
user.TwoFactorEnabled = row["TwoFactorEnabled"] == "1" ? true:false;
|
||||
// Handle both "1"/"0" and "true"/"false" (case-insensitive) for TwoFactorEnabled
|
||||
var twoFactorValue = row["TwoFactorEnabled"]?.ToString()?.ToLowerInvariant();
|
||||
user.TwoFactorEnabled = twoFactorValue == "1" || twoFactorValue == "true";
|
||||
user.SecurityProfile = GetSecurityProfile(user);
|
||||
user.UserPreferences = GetPreferences(user);
|
||||
user.Avatar = string.IsNullOrEmpty((string?)row["AvatarId"]) ? Guid.Empty : Guid.Parse((string?)row["AvatarId"]);
|
||||
user.ProfileId = string.IsNullOrEmpty((string?)row["ProfileId"]) ? Guid.Empty : Guid.Parse((string?)row["ProfileId"]);
|
||||
users.Add(user);
|
||||
}
|
||||
|
||||
|
|
@ -180,7 +190,7 @@ namespace Authentication
|
|||
|
||||
public TUser GetUserByEmail(string email)
|
||||
{
|
||||
List<TUser> users = GetUserByName(email);
|
||||
List<TUser> users = GetUserByName(email, true);
|
||||
if (users.Count == 0)
|
||||
{
|
||||
return null;
|
||||
|
|
@ -258,10 +268,11 @@ namespace Authentication
|
|||
/// <returns></returns>
|
||||
public int Insert(TUser user)
|
||||
{
|
||||
string commandText = @"Insert into Users (UserName, Id, PasswordHash, SecurityStamp, ConcurrencyStamp, Email, EmailConfirmed, PhoneNumber, PhoneNumberConfirmed, NormalizedEmail, NormalizedUserName, AccessFailedCount, LockoutEnabled, LockoutEnd, TwoFactorEnabled) values (@name, @id, @pwdHash, @SecStamp, @concurrencystamp, @email ,@emailconfirmed ,@phonenumber, @phonenumberconfirmed, @normalizedemail, @normalizedusername, @accesscount, @lockoutenabled, @lockoutenddate, @twofactorenabled);";
|
||||
string commandText = @"Insert into Users (UserName, Id, PasswordHash, SecurityStamp, ConcurrencyStamp, Email, EmailConfirmed, PhoneNumber, PhoneNumberConfirmed, NormalizedEmail, NormalizedUserName, AccessFailedCount, LockoutEnabled, LockoutEnd, TwoFactorEnabled) values (@name, @id, @pwdHash, @SecStamp, @concurrencystamp, @email ,@emailconfirmed ,@phonenumber, @phonenumberconfirmed, @normalizedemail, @normalizedusername, @accesscount, @lockoutenabled, @lockoutenddate, @twofactorenabled); Insert into UserProfiles (Id, UserId, DisplayName, Quip, UnstructuredData) values (@profileId, @id, @email, '', '{}');";
|
||||
Dictionary<string, object> parameters = new Dictionary<string, object>();
|
||||
parameters.Add("@name", user.UserName);
|
||||
parameters.Add("@id", user.Id);
|
||||
parameters.Add("@profileId", Guid.NewGuid());
|
||||
parameters.Add("@pwdHash", user.PasswordHash);
|
||||
parameters.Add("@SecStamp", user.SecurityStamp);
|
||||
parameters.Add("@concurrencystamp", user.ConcurrencyStamp);
|
||||
|
|
@ -292,7 +303,7 @@ namespace Authentication
|
|||
/// <returns></returns>
|
||||
private int Delete(string userId)
|
||||
{
|
||||
string commandText = "Delete from Users where Id = @userId; Delete from User_Settings where Id = @userId; Delete from GameState where UserId = @userId;";
|
||||
string commandText = "Delete from Users where Id = @userId; Delete from User_Settings where Id = @userId; Delete from UserProfiles where UserId = @userId; Delete from GameState where UserId = @userId;";
|
||||
Dictionary<string, object> parameters = new Dictionary<string, object>();
|
||||
parameters.Add("@userId", userId);
|
||||
|
||||
|
|
@ -376,7 +387,7 @@ namespace Authentication
|
|||
Dictionary<string, object> parameters = new Dictionary<string, object>();
|
||||
parameters.Add("Id", user.Id);
|
||||
parameters.Add("SecurityProfile", Newtonsoft.Json.JsonConvert.SerializeObject(securityProfile));
|
||||
|
||||
|
||||
return _database.ExecuteCMD(commandText, parameters).Rows.Count;
|
||||
}
|
||||
|
||||
|
|
@ -408,7 +419,7 @@ namespace Authentication
|
|||
List<UserPreferenceViewModel> userPreferences = GetPreferences(user);
|
||||
|
||||
Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
|
||||
|
||||
|
||||
foreach (UserPreferenceViewModel modelItem in model)
|
||||
{
|
||||
bool prefItemFound = false;
|
||||
|
|
@ -449,7 +460,7 @@ namespace Authentication
|
|||
{
|
||||
{ "userid", user.Id }
|
||||
};
|
||||
|
||||
|
||||
if (bytes.Length == 0)
|
||||
{
|
||||
sql = "DELETE FROM UserAvatars WHERE UserId = @userid";
|
||||
|
|
@ -32,8 +32,7 @@ namespace Authentication
|
|||
public class LoginViewModel
|
||||
{
|
||||
[Required]
|
||||
[EmailAddress]
|
||||
[Display(Name = "Email")]
|
||||
[Display(Name = "Email or Username")]
|
||||
public string Email { get; set; }
|
||||
|
||||
[Required]
|
||||
|
|
@ -97,4 +96,33 @@ namespace Authentication
|
|||
[Display(Name = "Email")]
|
||||
public string Email { get; set; }
|
||||
}
|
||||
|
||||
public class ChangeUsernameViewModel
|
||||
{
|
||||
[Required]
|
||||
[StringLength(30, MinimumLength = 3, ErrorMessage = "The {0} must be between {2} and {1} characters long.")]
|
||||
[RegularExpression("^[A-Za-z0-9_.@-]+$", ErrorMessage = "Only letters, numbers, underscores (_), dashes (-), periods (.), and at signs (@) are allowed.")]
|
||||
[Display(Name = "New user name")]
|
||||
public string NewUserName { get; set; }
|
||||
}
|
||||
|
||||
public class TwoFactorVerifyViewModel
|
||||
{
|
||||
[Required]
|
||||
[Display(Name = "Authenticator code")]
|
||||
public string Code { get; set; }
|
||||
|
||||
[Display(Name = "Remember me?")]
|
||||
public bool RememberMe { get; set; }
|
||||
|
||||
[Display(Name = "Remember this device")]
|
||||
public bool RememberMachine { get; set; }
|
||||
}
|
||||
|
||||
public class TwoFactorRecoveryViewModel
|
||||
{
|
||||
[Required]
|
||||
[Display(Name = "Recovery code")]
|
||||
public string RecoveryCode { get; set; }
|
||||
}
|
||||
}
|
||||
|
|
@ -8,8 +8,9 @@ namespace Authentication
|
|||
public List<String> Roles { get; set; }
|
||||
public SecurityProfileViewModel SecurityProfile { get; set; }
|
||||
public List<UserPreferenceViewModel> UserPreferences { get; set; }
|
||||
public Guid Avatar { get; set; }
|
||||
public string HighestRole {
|
||||
public Guid ProfileId { get; set; }
|
||||
public string HighestRole
|
||||
{
|
||||
get
|
||||
{
|
||||
string _highestRole = "";
|
||||
48
gaseous-lib/Classes/Auth/Models/TwoFactorModels.cs
Normal file
48
gaseous-lib/Classes/Auth/Models/TwoFactorModels.cs
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
using System.ComponentModel.DataAnnotations;
|
||||
|
||||
namespace Authentication
|
||||
{
|
||||
/// <summary>
|
||||
/// Status information for Two-Factor Authentication.
|
||||
/// </summary>
|
||||
public class TwoFactorStatusModel
|
||||
{
|
||||
/// <summary>Whether two-factor authentication is enabled for the user.</summary>
|
||||
public bool Enabled { get; set; }
|
||||
/// <summary>Whether an authenticator key has been provisioned for the user.</summary>
|
||||
public bool HasAuthenticatorKey { get; set; }
|
||||
/// <summary>The number of remaining recovery codes.</summary>
|
||||
public int RecoveryCodesLeft { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to generate a number of new recovery codes.
|
||||
/// </summary>
|
||||
public class GenerateRecoveryCodesRequest
|
||||
{
|
||||
/// <summary>The number of recovery codes to generate.</summary>
|
||||
[Range(1, 100)]
|
||||
public int Count { get; set; } = 10;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request used by admins to disable two-factor on a target user.
|
||||
/// </summary>
|
||||
/// <summary>
/// Payload for an administrator request to turn off two-factor
/// authentication on another user's account. The target may be
/// identified by either ID or email address.
/// </summary>
public class AdminDisable2FARequest
{
    /// <summary>
    /// The target user's ID.
    /// </summary>
    public string? UserId { get; set; }

    /// <summary>
    /// The target user's email address.
    /// </summary>
    public string? Email { get; set; }
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to confirm and enable two-factor using an authenticator code.
|
||||
/// </summary>
|
||||
/// <summary>
/// Payload used to confirm authenticator setup and enable two-factor
/// authentication by verifying a TOTP code.
/// </summary>
public class ConfirmAuthenticatorRequest
{
    /// <summary>
    /// The 6-digit TOTP code from the authenticator app.
    /// </summary>
    [Required]
    public string Code { get; set; } = string.Empty;
}
|
||||
}
|
||||
57
gaseous-lib/Classes/Auth/Models/UserViewModel.cs
Normal file
57
gaseous-lib/Classes/Auth/Models/UserViewModel.cs
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
namespace Authentication
|
||||
{
|
||||
/// <summary>
/// View model describing a user account, including role membership and the
/// derived highest-privilege role.
/// </summary>
public class UserViewModel
{
    /// <summary>The user's unique identifier.</summary>
    public string Id { get; set; }

    /// <summary>The user's login name.</summary>
    public string UserName { get; set; }

    /// <summary>The user's email address.</summary>
    public string EmailAddress { get; set; }

    /// <summary>Whether lockout is enabled for this account.</summary>
    public bool LockoutEnabled { get; set; }

    /// <summary>When the current lockout ends, if the account is locked out.</summary>
    public DateTimeOffset? LockoutEnd { get; set; }

    /// <summary>The names of the roles the user belongs to.</summary>
    public List<string> Roles { get; set; }

    /// <summary>The user's security profile.</summary>
    public SecurityProfileViewModel SecurityProfile { get; set; }

    /// <summary>The identifier of the user's profile.</summary>
    public Guid ProfileId { get; set; }

    /// <summary>
    /// The highest-privilege role the user holds, ranked Admin > Gamer > Player.
    /// Unknown role names are treated as Player-level and never downgrade a
    /// recognised higher role. Returns "Player" when <see cref="Roles"/> is null
    /// and an empty string when the list is empty (preserving the original contract).
    /// </summary>
    public string HighestRole
    {
        get
        {
            // No role information at all: fall back to the lowest privilege level.
            if (Roles == null)
            {
                return "Player";
            }

            // Rank known roles explicitly. The previous implementation's switch
            // had a default case that unconditionally reset the result to
            // "Player", so an unrecognised role listed after "Admin" or "Gamer"
            // clobbered the higher role; checking membership fixes that.
            if (Roles.Contains("Admin"))
            {
                return "Admin";
            }
            if (Roles.Contains("Gamer"))
            {
                return "Gamer";
            }
            if (Roles.Count > 0)
            {
                // Any remaining role ("Player" or an unknown name) maps to Player.
                return "Player";
            }

            // Empty role list: preserve the original behavior of returning "".
            return "";
        }
    }
}
|
||||
}
|
||||
174
gaseous-lib/Classes/Bios.cs
Normal file
174
gaseous-lib/Classes/Bios.cs
Normal file
|
|
@ -0,0 +1,174 @@
|
|||
using System;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading.Tasks;
|
||||
using gaseous_server.Classes.Metadata;
|
||||
using gaseous_server.Classes.Plugins.MetadataProviders.MetadataTypes;
|
||||
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
/// <summary>
/// Helpers for importing, migrating, and enumerating emulator BIOS/firmware
/// files. Files live in the firmware library directory named "&lt;md5&gt;.bios".
/// </summary>
public class Bios
{
    public Bios()
    {

    }

    /// <summary>
    /// Attempts to import <paramref name="FilePath"/> as a BIOS file by matching
    /// its MD5 hash against the known BIOS list; on a match for a not-yet-available
    /// BIOS, the file is moved into the firmware library.
    /// </summary>
    /// <param name="FilePath">Path of the candidate file to import.</param>
    /// <param name="Hash">Pre-computed hashes of the candidate file; only the MD5 is consulted.</param>
    /// <param name="BiosFileInfo">Receives import results: "type" ("bios"), "status"
    /// ("notimported", "imported", or "duplicate"), and on successful import "name" and "platform".</param>
    public static void ImportBiosFile(string FilePath, HashObject Hash, ref Dictionary<string, object> BiosFileInfo)
    {
        BiosFileInfo.Add("type", "bios");
        BiosFileInfo.Add("status", "notimported");

        // NOTE(review): .Result blocks the calling thread on the async GetBios();
        // acceptable only if this always runs off a synchronization context.
        foreach (Classes.Bios.BiosItem biosItem in Classes.Bios.GetBios().Result)
        {
            if (biosItem.Available == false)
            {
                if (biosItem.hash == Hash.md5hash)
                {
                    // biosPath is recomputed here only for the log message; it is
                    // the same value as biosItem.biosPath used in the move below.
                    string biosPath = Path.Combine(Config.LibraryConfiguration.LibraryFirmwareDirectory, biosItem.hash + ".bios");
                    Logging.LogKey(Logging.LogType.Information, "process.import_bios_file", "importbiosfile.is_a_bios_file_moving_to", null, new string[] { FilePath, biosPath });

                    File.Move(FilePath, biosItem.biosPath, true);

                    BiosFileInfo.Add("name", biosItem.filename);
                    // NOTE(review): GetPlatform appears to be awaitable elsewhere in
                    // this class (BuildBiosList awaits Metadata.Platforms.GetPlatform);
                    // if this is the same method, a Task rather than a Platform object
                    // is stored here — confirm whether .Result/await is missing.
                    BiosFileInfo.Add("platform", Platforms.GetPlatform(biosItem.platformid));
                    BiosFileInfo["status"] = "imported";
                }
            }
            else
            {
                if (biosItem.hash == Hash.md5hash)
                {
                    // A file with this hash is already present in the library.
                    BiosFileInfo["status"] = "duplicate";
                }
            }
        }
    }

    /// <summary>
    /// Migrates BIOS files from the legacy per-platform folder layout into the
    /// flat firmware directory (files renamed to "&lt;md5&gt;.bios"), then deletes
    /// the legacy directory tree.
    /// </summary>
    public static void MigrateToNewFolderStructure()
    {
        // migrate from old BIOS file structure which had each bios file inside a folder named for the platform to the new structure which has each file in a subdirectory named after the MD5 hash
        if (Directory.Exists(Config.LibraryConfiguration.LibraryBIOSDirectory))
        {
            foreach (Models.PlatformMapping.PlatformMapItem platformMapping in Models.PlatformMapping.PlatformMap)
            {
                if (platformMapping.Bios != null)
                {
                    foreach (Models.PlatformMapping.PlatformMapItem.EmulatorBiosItem emulatorBiosItem in platformMapping.Bios)
                    {
                        // Old layout: <bios dir>/<platform slug>/<filename>
                        string oldBiosPath = Path.Combine(Config.LibraryConfiguration.LibraryBIOSDirectory, platformMapping.IGDBSlug.ToString(), emulatorBiosItem.filename);
                        // New layout: <firmware dir>/<md5>.bios
                        string newBiosPath = Path.Combine(Config.LibraryConfiguration.LibraryFirmwareDirectory, emulatorBiosItem.hash + ".bios");

                        if (File.Exists(oldBiosPath))
                        {
                            File.Copy(oldBiosPath, newBiosPath, true);
                        }
                    }
                }
            }

            // remove old BIOS folder structure
            Directory.Delete(Config.LibraryConfiguration.LibraryBIOSDirectory, true);
        }
    }

    /// <summary>
    /// Finds the platform mapping whose BIOS list contains an entry with the
    /// given MD5 hash (case-insensitive), or null when no mapping matches.
    /// </summary>
    /// <param name="MD5">The MD5 hash to look up.</param>
    public static Models.PlatformMapping.PlatformMapItem? BiosHashSignatureLookup(string MD5)
    {
        foreach (Models.PlatformMapping.PlatformMapItem platformMapping in Models.PlatformMapping.PlatformMap)
        {
            if (platformMapping.Bios != null)
            {
                foreach (Models.PlatformMapping.PlatformMapItem.EmulatorBiosItem emulatorBiosItem in platformMapping.Bios)
                {
                    if (emulatorBiosItem.hash.ToLower() == MD5.ToLower())
                    {
                        return platformMapping;
                    }
                }
            }
        }

        return null;
    }

    /// <summary>Returns the full list of known BIOS items across all mapped platforms.</summary>
    public static async Task<List<BiosItem>> GetBios()
    {
        return await BuildBiosList();
    }

    /// <summary>
    /// Returns the BIOS items for a single platform, optionally hiding entries
    /// whose file is not present on disk.
    /// </summary>
    /// <param name="PlatformId">The platform ID to filter on.</param>
    /// <param name="HideUnavailable">When true, only items whose file exists are returned.</param>
    public static async Task<List<BiosItem>> GetBios(long PlatformId, bool HideUnavailable)
    {
        List<BiosItem> biosItems = new List<BiosItem>();
        foreach (BiosItem biosItem in await BuildBiosList())
        {
            if (biosItem.platformid == PlatformId)
            {
                if (HideUnavailable == true)
                {
                    if (biosItem.Available == true)
                    {
                        biosItems.Add(biosItem);
                    }
                }
                else
                {
                    biosItems.Add(biosItem);
                }
            }
        }

        return biosItems;
    }

    // Builds the BIOS list by joining the platform map's BIOS entries with
    // platform metadata; hashes are normalised to lower case.
    private static async Task<List<BiosItem>> BuildBiosList()
    {
        List<BiosItem> biosItems = new List<BiosItem>();

        foreach (Models.PlatformMapping.PlatformMapItem platformMapping in Models.PlatformMapping.PlatformMap)
        {
            if (platformMapping.Bios != null)
            {
                Platform platform = await Metadata.Platforms.GetPlatform(platformMapping.IGDBId);

                foreach (Models.PlatformMapping.PlatformMapItem.EmulatorBiosItem emulatorBios in platformMapping.Bios)
                {
                    BiosItem biosItem = new BiosItem
                    {
                        platformid = platformMapping.IGDBId,
                        platformslug = platform.Slug,
                        platformname = platform.Name,
                        description = emulatorBios.description,
                        filename = emulatorBios.filename,
                        hash = emulatorBios.hash.ToLower()
                    };
                    biosItems.Add(biosItem);
                }
            }
        }
        return biosItems;
    }

    /// <summary>
    /// A known BIOS entry enriched with platform details and on-disk availability.
    /// </summary>
    public class BiosItem : Models.PlatformMapping.PlatformMapItem.EmulatorBiosItem
    {
        public long platformid { get; set; }
        public string platformslug { get; set; }
        public string platformname { get; set; }
        // Expected on-disk location: "<firmware dir>/<md5>.bios".
        public string biosPath
        {
            get
            {
                return Path.Combine(Config.LibraryConfiguration.LibraryFirmwareDirectory, hash + ".bios");
            }
        }
        // True when the BIOS file currently exists at biosPath.
        public bool Available
        {
            get
            {
                bool fileExists = File.Exists(biosPath);
                return fileExists;
            }
        }
    }
}
|
||||
}
|
||||
|
||||
63
gaseous-lib/Classes/CRC32.cs
Normal file
63
gaseous-lib/Classes/CRC32.cs
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
using System;
|
||||
using System.IO;
|
||||
|
||||
/// <summary>
/// Table-driven CRC32 implementation (reflected algorithm, polynomial
/// 0xEDB88320, initial value 0xFFFFFFFF, final XOR 0xFFFFFFFF — the common
/// CRC-32 used by ZIP/PNG/Ethernet).
/// </summary>
public static class CRC32
{
    // Lookup table: Table[i] is the CRC contribution of the single byte i.
    private static readonly uint[] Table;

    // Static constructor to initialize the CRC32 table.
    static CRC32()
    {
        const uint polynomial = 0xedb88320;
        Table = new uint[256];

        for (uint i = 0; i < 256; i++)
        {
            uint crc = i;
            for (int j = 8; j > 0; j--)
            {
                if ((crc & 1) == 1)
                {
                    crc = (crc >> 1) ^ polynomial;
                }
                else
                {
                    crc >>= 1;
                }
            }
            Table[i] = crc;
        }
    }

    /// <summary>
    /// Computes the CRC32 checksum of a byte array.
    /// </summary>
    /// <param name="data">The bytes to checksum.</param>
    /// <returns>The CRC32 of <paramref name="data"/>; 0 for an empty array.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="data"/> is null.</exception>
    public static uint Compute(byte[] data)
    {
        if (data == null)
        {
            // Fail fast with a clear exception instead of a NullReferenceException
            // from the foreach below.
            throw new ArgumentNullException(nameof(data));
        }

        uint crc = 0xffffffff;

        foreach (byte b in data)
        {
            byte tableIndex = (byte)((crc & 0xff) ^ b);
            crc = (crc >> 8) ^ Table[tableIndex];
        }

        return ~crc;
    }

    /// <summary>
    /// Computes the CRC32 checksum of a file's contents.
    /// </summary>
    /// <param name="filePath">Path of the file to checksum.</param>
    /// <returns>The CRC32 of the file's bytes; 0 for an empty file.</returns>
    public static uint ComputeFile(string filePath)
    {
        uint crc = 0xffffffff;

        using (FileStream fs = File.OpenRead(filePath))
        {
            // Read in buffered chunks rather than one byte at a time: the previous
            // implementation called FileStream.ReadByte() per input byte, which is
            // dramatically slower for large files while producing the same result.
            byte[] buffer = new byte[81920];
            int bytesRead;
            while ((bytesRead = fs.Read(buffer, 0, buffer.Length)) > 0)
            {
                for (int i = 0; i < bytesRead; i++)
                {
                    byte tableIndex = (byte)((crc & 0xff) ^ buffer[i]);
                    crc = (crc >> 8) ^ Table[tableIndex];
                }
            }
        }

        return ~crc;
    }
}
|
||||
982
gaseous-lib/Classes/Collections.cs
Normal file
982
gaseous-lib/Classes/Collections.cs
Normal file
|
|
@ -0,0 +1,982 @@
|
|||
// using System;
|
||||
// using System.Data;
|
||||
// using System.IO.Compression;
|
||||
// using System.Reflection;
|
||||
// using System.Runtime.InteropServices;
|
||||
// using System.Security.Cryptography;
|
||||
// using Authentication;
|
||||
// using gaseous_server.Classes.Metadata;
|
||||
// using gaseous_server.Controllers;
|
||||
// using gaseous_server.Controllers.v1_1;
|
||||
// using gaseous_server.Models;
|
||||
// using gaseous_server.Classes.Plugins.MetadataProviders.MetadataTypes;
|
||||
// using Microsoft.AspNetCore.Identity;
|
||||
// using Microsoft.AspNetCore.Mvc.Filters;
|
||||
// using Newtonsoft.Json;
|
||||
// using SharpCompress.Common;
|
||||
// using static gaseous_server.Classes.Metadata.Games;
|
||||
|
||||
// namespace gaseous_server.Classes
|
||||
// {
|
||||
// public class Collections
|
||||
// {
|
||||
// public static List<CollectionItem> GetCollections(string userid)
|
||||
// {
|
||||
// Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
|
||||
// string sql = "SELECT * FROM RomCollections WHERE OwnedBy=@ownedby ORDER BY `Name`";
|
||||
// Dictionary<string, object> dbDict = new Dictionary<string, object>{
|
||||
// { "ownedby", userid }
|
||||
// };
|
||||
// DataTable data = db.ExecuteCMD(sql, dbDict);
|
||||
|
||||
// List<CollectionItem> collectionItems = new List<CollectionItem>();
|
||||
|
||||
// foreach (DataRow row in data.Rows)
|
||||
// {
|
||||
// collectionItems.Add(BuildCollectionItem(row));
|
||||
// }
|
||||
|
||||
// return collectionItems;
|
||||
// }
|
||||
|
||||
// public static CollectionItem GetCollection(long Id, string userid)
|
||||
// {
|
||||
// Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
|
||||
// string sql;
|
||||
// if (userid == "")
|
||||
// {
|
||||
// // reserved for internal operations
|
||||
// sql = "SELECT * FROM RomCollections WHERE Id = @id ORDER BY `Name`";
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// // instigated by a user
|
||||
// sql = "SELECT * FROM RomCollections WHERE Id = @id AND OwnedBy = @ownedby ORDER BY `Name`";
|
||||
// }
|
||||
// Dictionary<string, object> dbDict = new Dictionary<string, object>
|
||||
// {
|
||||
// { "id", Id },
|
||||
// { "ownedby", userid }
|
||||
// };
|
||||
// DataTable romDT = db.ExecuteCMD(sql, dbDict);
|
||||
|
||||
// if (romDT.Rows.Count > 0)
|
||||
// {
|
||||
// DataRow row = romDT.Rows[0];
|
||||
// CollectionItem collectionItem = BuildCollectionItem(row);
|
||||
|
||||
// return collectionItem;
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// throw new Exception("Unknown Collection Id");
|
||||
// }
|
||||
// }
|
||||
|
||||
// public static CollectionItem NewCollection(CollectionItem item, string userid)
|
||||
// {
|
||||
// Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
|
||||
// string sql = "INSERT INTO RomCollections (`Name`, Description, Platforms, Genres, Players, PlayerPerspectives, Themes, MinimumRating, MaximumRating, MaximumRomsPerPlatform, MaximumBytesPerPlatform, MaximumCollectionSizeInBytes, FolderStructure, IncludeBIOSFiles, ArchiveType, AlwaysInclude, BuiltStatus, OwnedBy) VALUES (@name, @description, @platforms, @genres, @players, @playerperspectives, @themes, @minimumrating, @maximumrating, @maximumromsperplatform, @maximumbytesperplatform, @maximumcollectionsizeinbytes, @folderstructure, @includebiosfiles, @archivetype, @alwaysinclude, @builtstatus, @ownedby); SELECT CAST(LAST_INSERT_ID() AS SIGNED);";
|
||||
// Dictionary<string, object> dbDict = new Dictionary<string, object>
|
||||
// {
|
||||
// { "name", item.Name },
|
||||
// { "description", item.Description },
|
||||
// { "platforms", Newtonsoft.Json.JsonConvert.SerializeObject(Common.ReturnValueIfNull(item.Platforms, new List<long>())) },
|
||||
// { "genres", Newtonsoft.Json.JsonConvert.SerializeObject(Common.ReturnValueIfNull(item.Genres, new List<long>())) },
|
||||
// { "players", Newtonsoft.Json.JsonConvert.SerializeObject(Common.ReturnValueIfNull(item.Players, new List<long>())) },
|
||||
// { "playerperspectives", Newtonsoft.Json.JsonConvert.SerializeObject(Common.ReturnValueIfNull(item.PlayerPerspectives, new List<long>())) },
|
||||
// { "themes", Newtonsoft.Json.JsonConvert.SerializeObject(Common.ReturnValueIfNull(item.Themes, new List<long>())) },
|
||||
// { "minimumrating", Common.ReturnValueIfNull(item.MinimumRating, -1) },
|
||||
// { "maximumrating", Common.ReturnValueIfNull(item.MaximumRating, -1) },
|
||||
// { "maximumromsperplatform", Common.ReturnValueIfNull(item.MaximumRomsPerPlatform, -1) },
|
||||
// { "maximumbytesperplatform", Common.ReturnValueIfNull(item.MaximumBytesPerPlatform, -1) },
|
||||
// { "maximumcollectionsizeinbytes", Common.ReturnValueIfNull(item.MaximumCollectionSizeInBytes, -1) },
|
||||
// { "folderstructure", Common.ReturnValueIfNull(item.FolderStructure, CollectionItem.FolderStructures.Gaseous) },
|
||||
// { "includebiosfiles", Common.ReturnValueIfNull(item.IncludeBIOSFiles, 0) },
|
||||
// { "archivetype", Common.ReturnValueIfNull(item.ArchiveType, CollectionItem.ArchiveTypes.Zip) },
|
||||
// { "alwaysinclude", Newtonsoft.Json.JsonConvert.SerializeObject(Common.ReturnValueIfNull(item.AlwaysInclude, new List<CollectionItem.AlwaysIncludeItem>())) },
|
||||
// { "builtstatus", CollectionItem.CollectionBuildStatus.WaitingForBuild },
|
||||
// { "ownedby", userid }
|
||||
// };
|
||||
// DataTable romDT = db.ExecuteCMD(sql, dbDict);
|
||||
// long CollectionId = (long)romDT.Rows[0][0];
|
||||
|
||||
// CollectionItem collectionItem = GetCollection(CollectionId, userid);
|
||||
|
||||
// StartCollectionItemBuild(CollectionId, userid);
|
||||
|
||||
// return collectionItem;
|
||||
// }
|
||||
|
||||
// public static CollectionItem EditCollection(long Id, CollectionItem item, string userid, bool ForceRebuild = true)
|
||||
// {
|
||||
// Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
|
||||
// string sql = "UPDATE RomCollections SET `Name`=@name, Description=@description, Platforms=@platforms, Genres=@genres, Players=@players, PlayerPerspectives=@playerperspectives, Themes=@themes, MinimumRating=@minimumrating, MaximumRating=@maximumrating, MaximumRomsPerPlatform=@maximumromsperplatform, MaximumBytesPerPlatform=@maximumbytesperplatform, MaximumCollectionSizeInBytes=@maximumcollectionsizeinbytes, FolderStructure=@folderstructure, IncludeBIOSFiles=@includebiosfiles, ArchiveType=@archivetype, AlwaysInclude=@alwaysinclude, BuiltStatus=@builtstatus WHERE Id=@id AND OwnedBy=@ownedby";
|
||||
// Dictionary<string, object> dbDict = new Dictionary<string, object>
|
||||
// {
|
||||
// { "id", Id },
|
||||
// { "name", item.Name },
|
||||
// { "description", item.Description },
|
||||
// { "platforms", Newtonsoft.Json.JsonConvert.SerializeObject(Common.ReturnValueIfNull(item.Platforms, new List<long>())) },
|
||||
// { "genres", Newtonsoft.Json.JsonConvert.SerializeObject(Common.ReturnValueIfNull(item.Genres, new List<long>())) },
|
||||
// { "players", Newtonsoft.Json.JsonConvert.SerializeObject(Common.ReturnValueIfNull(item.Players, new List<long>())) },
|
||||
// { "playerperspectives", Newtonsoft.Json.JsonConvert.SerializeObject(Common.ReturnValueIfNull(item.PlayerPerspectives, new List<long>())) },
|
||||
// { "themes", Newtonsoft.Json.JsonConvert.SerializeObject(Common.ReturnValueIfNull(item.Themes, new List<long>())) },
|
||||
// { "minimumrating", Common.ReturnValueIfNull(item.MinimumRating, -1) },
|
||||
// { "maximumrating", Common.ReturnValueIfNull(item.MaximumRating, -1) },
|
||||
// { "maximumromsperplatform", Common.ReturnValueIfNull(item.MaximumRomsPerPlatform, -1) },
|
||||
// { "maximumbytesperplatform", Common.ReturnValueIfNull(item.MaximumBytesPerPlatform, -1) },
|
||||
// { "maximumcollectionsizeinbytes", Common.ReturnValueIfNull(item.MaximumCollectionSizeInBytes, -1) },
|
||||
// { "folderstructure", Common.ReturnValueIfNull(item.FolderStructure, CollectionItem.FolderStructures.Gaseous) },
|
||||
// { "includebiosfiles", Common.ReturnValueIfNull(item.IncludeBIOSFiles, 0) },
|
||||
// { "alwaysinclude", Newtonsoft.Json.JsonConvert.SerializeObject(Common.ReturnValueIfNull(item.AlwaysInclude, new List<CollectionItem.AlwaysIncludeItem>())) },
|
||||
// { "archivetype", Common.ReturnValueIfNull(item.ArchiveType, CollectionItem.ArchiveTypes.Zip) },
|
||||
// { "ownedby", userid }
|
||||
// };
|
||||
|
||||
// string CollectionZipFile = Path.Combine(Config.LibraryConfiguration.LibraryCollectionsDirectory, Id + item.ArchiveExtension);
|
||||
// if (ForceRebuild == true)
|
||||
// {
|
||||
// dbDict.Add("builtstatus", CollectionItem.CollectionBuildStatus.WaitingForBuild);
|
||||
// if (File.Exists(CollectionZipFile))
|
||||
// {
|
||||
// Logging.LogKey(Logging.LogType.Warning, "process.collections", "collections.deleting_existing_build", null, new string[] { item.Name });
|
||||
// File.Delete(CollectionZipFile);
|
||||
// }
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// if (File.Exists(CollectionZipFile))
|
||||
// {
|
||||
// dbDict.Add("builtstatus", CollectionItem.CollectionBuildStatus.Completed);
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// dbDict.Add("builtstatus", CollectionItem.CollectionBuildStatus.NoStatus);
|
||||
// }
|
||||
// }
|
||||
// db.ExecuteCMD(sql, dbDict);
|
||||
|
||||
// CollectionItem collectionItem = GetCollection(Id, userid);
|
||||
|
||||
// if (collectionItem.BuildStatus == CollectionItem.CollectionBuildStatus.WaitingForBuild)
|
||||
// {
|
||||
// StartCollectionItemBuild(Id, userid);
|
||||
// }
|
||||
|
||||
// return collectionItem;
|
||||
// }
|
||||
|
||||
// public static void DeleteCollection(long Id, string userid)
|
||||
// {
|
||||
// Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
|
||||
// string sql = "DELETE FROM RomCollections WHERE Id=@id AND OwnedBy=@ownedby";
|
||||
// Dictionary<string, object> dbDict = new Dictionary<string, object>
|
||||
// {
|
||||
// { "id", Id },
|
||||
// { "ownedby", userid }
|
||||
// };
|
||||
// db.ExecuteCMD(sql, dbDict);
|
||||
|
||||
// string CollectionZipFile = Path.Combine(Config.LibraryConfiguration.LibraryCollectionsDirectory, Id + ".zip");
|
||||
// if (File.Exists(CollectionZipFile))
|
||||
// {
|
||||
// File.Delete(CollectionZipFile);
|
||||
// }
|
||||
// }
|
||||
|
||||
// public static void StartCollectionItemBuild(long Id, string userid)
|
||||
// {
|
||||
// // send blank user id to getcollection as this is not a user initiated process
|
||||
// CollectionItem collectionItem = GetCollection(Id, userid);
|
||||
|
||||
// if (collectionItem.BuildStatus != CollectionItem.CollectionBuildStatus.Building)
|
||||
// {
|
||||
// // set collection item to waitingforbuild
|
||||
// Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
|
||||
// string sql = "UPDATE RomCollections SET BuiltStatus=@bs WHERE Id=@id";
|
||||
// Dictionary<string, object> dbDict = new Dictionary<string, object>();
|
||||
// dbDict.Add("id", Id);
|
||||
// dbDict.Add("bs", CollectionItem.CollectionBuildStatus.WaitingForBuild);
|
||||
// db.ExecuteCMD(sql, dbDict);
|
||||
|
||||
// // start background task
|
||||
// ProcessQueue.QueueProcessor.QueueItem queueItem = new ProcessQueue.QueueProcessor.QueueItem(ProcessQueue.QueueItemType.CollectionCompiler, 1, false, true);
|
||||
// queueItem.Options = new Dictionary<string, object>{
|
||||
// { "Id", Id },
|
||||
// { "UserId", userid }
|
||||
// };
|
||||
// queueItem.ForceExecute();
|
||||
// ProcessQueue.QueueProcessor.QueueItems.Add(queueItem);
|
||||
// }
|
||||
// }
|
||||
|
||||
// public static CollectionContents GetCollectionContent(CollectionItem collectionItem, string userid)
|
||||
// {
|
||||
// Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
|
||||
|
||||
// // get age ratings for specified user
|
||||
// List<AgeGroups.AgeRestrictionGroupings> UserAgeGroupings = new List<AgeGroups.AgeRestrictionGroupings>();
|
||||
// bool UserAgeGroupIncludeUnrated = true;
|
||||
// ApplicationUser? user = null;
|
||||
// if (userid != "")
|
||||
// {
|
||||
// Authentication.UserTable<Authentication.ApplicationUser> userTable = new UserTable<ApplicationUser>(db);
|
||||
// user = userTable.GetUserById(userid);
|
||||
|
||||
// if (user.SecurityProfile.AgeRestrictionPolicy.IncludeUnrated == false)
|
||||
// {
|
||||
// UserAgeGroupIncludeUnrated = false;
|
||||
// }
|
||||
|
||||
// foreach (AgeGroups.AgeRestrictionGroupings ageGrouping in Enum.GetValues(typeof(AgeGroups.AgeRestrictionGroupings)))
|
||||
// {
|
||||
// if (ageGrouping <= user.SecurityProfile.AgeRestrictionPolicy.MaximumAgeRestriction && ageGrouping != AgeGroups.AgeRestrictionGroupings.Unclassified)
|
||||
// {
|
||||
// UserAgeGroupings.Add(ageGrouping);
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// List<CollectionContents.CollectionPlatformItem> collectionPlatformItems = new List<CollectionContents.CollectionPlatformItem>();
|
||||
|
||||
// // get platforms
|
||||
// List<long> platformids = new List<long>();
|
||||
// platformids.AddRange(collectionItem.Platforms);
|
||||
|
||||
// List<long>? DynamicPlatforms = new List<long>();
|
||||
// DynamicPlatforms.AddRange(collectionItem.Platforms);
|
||||
|
||||
// List<Platform> platforms = new List<Platform>();
|
||||
|
||||
// // add platforms with an inclusion status
|
||||
// foreach (CollectionItem.AlwaysIncludeItem alwaysIncludeItem in collectionItem.AlwaysInclude)
|
||||
// {
|
||||
// if (
|
||||
// alwaysIncludeItem.InclusionState == CollectionItem.AlwaysIncludeStatus.AlwaysInclude ||
|
||||
// alwaysIncludeItem.InclusionState == CollectionItem.AlwaysIncludeStatus.AlwaysExclude
|
||||
// )
|
||||
// {
|
||||
// if (!platformids.Contains(alwaysIncludeItem.PlatformId))
|
||||
// {
|
||||
// platformids.Add(alwaysIncludeItem.PlatformId);
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// // add dynamic platforms
|
||||
// if (DynamicPlatforms.Count > 0)
|
||||
// {
|
||||
// foreach (long PlatformId in platformids)
|
||||
// {
|
||||
// platforms.Add(Platforms.GetPlatform(PlatformId).Result);
|
||||
// }
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// // get all platforms to pull from
|
||||
// Dictionary<string, List<Filters.FilterItem>> FilterDict = Filters.Filter(AgeGroups.AgeRestrictionGroupings.Adult, true).Result;
|
||||
// List<Classes.Filters.FilterItem> filteredPlatforms = (List<Classes.Filters.FilterItem>)FilterDict["platforms"];
|
||||
// foreach (Filters.FilterItem filterItem in filteredPlatforms)
|
||||
// {
|
||||
// platforms.Add(Platforms.GetPlatform((long)filterItem.Id).Result);
|
||||
// }
|
||||
// }
|
||||
|
||||
// // age ratings
|
||||
// AgeGroups.AgeRestrictionGroupings AgeGrouping = AgeGroups.AgeRestrictionGroupings.Unclassified;
|
||||
// bool ContainsUnclassifiedAgeGroup = false;
|
||||
|
||||
// // build collection
|
||||
// List<CollectionContents.CollectionPlatformItem> platformItems = new List<CollectionContents.CollectionPlatformItem>();
|
||||
|
||||
// foreach (Platform platform in platforms)
|
||||
// {
|
||||
// long TotalRomSize = 0;
|
||||
// long TotalGameCount = 0;
|
||||
|
||||
// bool isDynamic = false;
|
||||
// if (DynamicPlatforms.Contains((long)platform.Id))
|
||||
// {
|
||||
// isDynamic = true;
|
||||
// }
|
||||
// else if (DynamicPlatforms.Count == 0)
|
||||
// {
|
||||
// isDynamic = true;
|
||||
// }
|
||||
|
||||
// Controllers.v1_1.GamesController.GameReturnPackage games = new Controllers.v1_1.GamesController.GameReturnPackage();
|
||||
// if (isDynamic == true)
|
||||
// {
|
||||
// Controllers.v1_1.GamesController.GameSearchModel searchModel = new Controllers.v1_1.GamesController.GameSearchModel
|
||||
// {
|
||||
// Name = "",
|
||||
// Platform = new List<string>{
|
||||
// platform.Id.ToString()
|
||||
// },
|
||||
// Genre = collectionItem.Genres.ConvertAll(s => s.ToString()),
|
||||
// GameMode = collectionItem.Players.ConvertAll(s => s.ToString()),
|
||||
// PlayerPerspective = collectionItem.PlayerPerspectives.ConvertAll(s => s.ToString()),
|
||||
// Theme = collectionItem.Themes.ConvertAll(s => s.ToString()),
|
||||
// GameRating = new Controllers.v1_1.GamesController.GameSearchModel.GameRatingItem
|
||||
// {
|
||||
// MinimumRating = collectionItem.MinimumRating,
|
||||
// MaximumRating = collectionItem.MaximumRating
|
||||
// },
|
||||
// GameAgeRating = new Controllers.v1_1.GamesController.GameSearchModel.GameAgeRatingItem
|
||||
// {
|
||||
// AgeGroupings = UserAgeGroupings,
|
||||
// IncludeUnrated = UserAgeGroupIncludeUnrated
|
||||
// }
|
||||
// };
|
||||
// games = Controllers.v1_1.GamesController.GetGames(searchModel, user).Result;
|
||||
|
||||
// }
|
||||
|
||||
// CollectionContents.CollectionPlatformItem collectionPlatformItem = new CollectionContents.CollectionPlatformItem(platform);
|
||||
// collectionPlatformItem.Games = new List<CollectionContents.CollectionPlatformItem.CollectionGameItem>();
|
||||
|
||||
// // add titles with an inclusion status
|
||||
// foreach (CollectionItem.AlwaysIncludeItem alwaysIncludeItem in collectionItem.AlwaysInclude)
|
||||
// {
|
||||
// if (
|
||||
// (
|
||||
// alwaysIncludeItem.InclusionState == CollectionItem.AlwaysIncludeStatus.AlwaysInclude ||
|
||||
// alwaysIncludeItem.InclusionState == CollectionItem.AlwaysIncludeStatus.AlwaysExclude
|
||||
// ) && alwaysIncludeItem.PlatformId == platform.Id
|
||||
// )
|
||||
// {
|
||||
// MinimalGameItem AlwaysIncludeGame = new MinimalGameItem(Games.GetGame(FileSignature.MetadataSources.IGDB, alwaysIncludeItem.GameId).Result);
|
||||
// CollectionContents.CollectionPlatformItem.CollectionGameItem gameItem = new CollectionContents.CollectionPlatformItem.CollectionGameItem(AlwaysIncludeGame);
|
||||
// gameItem.InclusionStatus = new CollectionItem.AlwaysIncludeItem();
|
||||
// gameItem.InclusionStatus.PlatformId = alwaysIncludeItem.PlatformId;
|
||||
// gameItem.InclusionStatus.GameId = alwaysIncludeItem.GameId;
|
||||
// gameItem.InclusionStatus.InclusionState = alwaysIncludeItem.InclusionState;
|
||||
|
||||
// // execute Roms.GetRomsAsync and wait for it to finish
|
||||
// // this is a blocking call
|
||||
// gameItem.Roms = Task.Run(async () =>
|
||||
// {
|
||||
// var result = await Roms.GetRomsAsync((long)gameItem.Id, (long)platform.Id);
|
||||
// return result.GameRomItems;
|
||||
// }).Result;
|
||||
|
||||
// collectionPlatformItem.Games.Add(gameItem);
|
||||
// }
|
||||
// }
|
||||
|
||||
// foreach (MinimalGameItem game in games.Games)
|
||||
// {
|
||||
// bool gameAlreadyInList = false;
|
||||
// foreach (CollectionContents.CollectionPlatformItem.CollectionGameItem existingGame in collectionPlatformItem.Games)
|
||||
// {
|
||||
// if (existingGame.Id == game.Id)
|
||||
// {
|
||||
// gameAlreadyInList = true;
|
||||
// }
|
||||
// }
|
||||
|
||||
// if (gameAlreadyInList == false)
|
||||
// {
|
||||
// CollectionContents.CollectionPlatformItem.CollectionGameItem collectionGameItem = new CollectionContents.CollectionPlatformItem.CollectionGameItem(game);
|
||||
|
||||
// // Retrieve ROMs for the game synchronously
|
||||
// List<Roms.GameRomItem> gameRoms = Task.Run(async () =>
|
||||
// {
|
||||
// var result = await Roms.GetRomsAsync((long)game.Id, (long)platform.Id);
|
||||
// return result.GameRomItems;
|
||||
// }).Result;
|
||||
|
||||
// // Calculate total ROM size for the game
|
||||
// long GameRomSize = gameRoms.Sum(r => (long)r.Size);
|
||||
|
||||
// bool AddGame = false;
|
||||
// if (collectionItem.MaximumBytesPerPlatform > 0)
|
||||
// {
|
||||
// if ((TotalRomSize + GameRomSize) < collectionItem.MaximumBytesPerPlatform)
|
||||
// {
|
||||
// AddGame = true;
|
||||
// }
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// AddGame = true;
|
||||
// }
|
||||
|
||||
// if (AddGame == true)
|
||||
// {
|
||||
// TotalRomSize += GameRomSize;
|
||||
|
||||
// bool AddRoms = false;
|
||||
|
||||
// if (collectionItem.MaximumRomsPerPlatform > 0)
|
||||
// {
|
||||
// if (TotalGameCount < collectionItem.MaximumRomsPerPlatform)
|
||||
// {
|
||||
// AddRoms = true;
|
||||
// }
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// AddRoms = true;
|
||||
// }
|
||||
|
||||
// if (AddRoms == true)
|
||||
// {
|
||||
// TotalGameCount += 1;
|
||||
// collectionGameItem.Roms = gameRoms;
|
||||
// collectionPlatformItem.Games.Add(collectionGameItem);
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// // handle age grouping
|
||||
// List<AgeRating> gameAgeRatings = game.AgeRatings.Select(s => (AgeRating)s).ToList();
|
||||
// AgeGroups.AgeRestrictionGroupings CurrentAgeGroup = AgeGroups.GetAgeGroupFromAgeRatings(gameAgeRatings);
|
||||
// if (CurrentAgeGroup > AgeGrouping)
|
||||
// {
|
||||
// AgeGrouping = CurrentAgeGroup;
|
||||
// }
|
||||
// if (CurrentAgeGroup == AgeGroups.AgeRestrictionGroupings.Unclassified)
|
||||
// {
|
||||
// ContainsUnclassifiedAgeGroup = true;
|
||||
// }
|
||||
// }
|
||||
|
||||
// collectionPlatformItem.Games.Sort((x, y) => x.Name.CompareTo(y.Name));
|
||||
|
||||
// if (collectionPlatformItem.Games.Count > 0)
|
||||
// {
|
||||
// bool AddPlatform = false;
|
||||
// if (collectionItem.MaximumCollectionSizeInBytes > 0)
|
||||
// {
|
||||
// if (TotalRomSize < collectionItem.MaximumCollectionSizeInBytes)
|
||||
// {
|
||||
// AddPlatform = true;
|
||||
// }
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// AddPlatform = true;
|
||||
// }
|
||||
|
||||
// if (AddPlatform == true)
|
||||
// {
|
||||
// collectionPlatformItems.Add(collectionPlatformItem);
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// collectionPlatformItems.Sort((x, y) => x.Name.CompareTo(y.Name));
|
||||
|
||||
// CollectionContents collectionContents = new CollectionContents
|
||||
// {
|
||||
// Collection = collectionPlatformItems,
|
||||
// AgeGroup = AgeGrouping,
|
||||
// ContainsUnclassifiedAgeGroup = ContainsUnclassifiedAgeGroup
|
||||
// };
|
||||
|
||||
// return collectionContents;
|
||||
// }
|
||||
|
||||
// public static void CompileCollections(long CollectionId, string userid)
|
||||
// {
|
||||
// Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
|
||||
|
||||
// CollectionItem collectionItem = GetCollection(CollectionId, userid);
|
||||
// if (collectionItem.BuildStatus == CollectionItem.CollectionBuildStatus.WaitingForBuild)
|
||||
// {
|
||||
// Logging.LogKey(Logging.LogType.Information, "process.collections", "collections.beginning_build", null, new string[] { collectionItem.Name });
|
||||
|
||||
// CollectionContents collectionContents = GetCollectionContent(collectionItem, userid);
|
||||
|
||||
// // set starting
|
||||
// string sql = "UPDATE RomCollections SET BuiltStatus=@bs, AgeGroup=@ag, AgeGroupUnclassified=@agu WHERE Id=@id";
|
||||
// Dictionary<string, object> dbDict = new Dictionary<string, object>
|
||||
// {
|
||||
// { "id", collectionItem.Id },
|
||||
// { "bs", CollectionItem.CollectionBuildStatus.Building },
|
||||
// { "ag", collectionContents.AgeGroup },
|
||||
// { "agu", collectionContents.ContainsUnclassifiedAgeGroup }
|
||||
// };
|
||||
// db.ExecuteCMD(sql, dbDict);
|
||||
|
||||
// List<CollectionContents.CollectionPlatformItem> collectionPlatformItems = collectionContents.Collection;
|
||||
// string ZipFilePath = Path.Combine(Config.LibraryConfiguration.LibraryCollectionsDirectory, collectionItem.Id + collectionItem.ArchiveExtension);
|
||||
// string ZipFileTempPath = Path.Combine(Config.LibraryConfiguration.LibraryTempDirectory, collectionItem.Id.ToString());
|
||||
|
||||
// try
|
||||
// {
|
||||
|
||||
// // clean up if needed
|
||||
// if (File.Exists(ZipFilePath))
|
||||
// {
|
||||
// Logging.LogKey(Logging.LogType.Warning, "process.collections", "collections.deleting_existing_build", null, new string[] { collectionItem.Name });
|
||||
// File.Delete(ZipFilePath);
|
||||
// }
|
||||
|
||||
// if (Directory.Exists(ZipFileTempPath))
|
||||
// {
|
||||
// Directory.Delete(ZipFileTempPath, true);
|
||||
// }
|
||||
|
||||
// // gather collection files
|
||||
// Directory.CreateDirectory(ZipFileTempPath);
|
||||
// string ZipBiosPath = Path.Combine(ZipFileTempPath, "BIOS");
|
||||
|
||||
// // get the games
|
||||
// foreach (CollectionContents.CollectionPlatformItem collectionPlatformItem in collectionPlatformItems)
|
||||
// {
|
||||
// // get platform bios files if present
|
||||
// if (collectionItem.IncludeBIOSFiles == true)
|
||||
// {
|
||||
// List<Bios.BiosItem> bios = Bios.GetBios(collectionPlatformItem.Id, true).Result;
|
||||
// if (!Directory.Exists(ZipBiosPath))
|
||||
// {
|
||||
// Directory.CreateDirectory(ZipBiosPath);
|
||||
// }
|
||||
|
||||
// foreach (Bios.BiosItem biosItem in bios)
|
||||
// {
|
||||
// if (File.Exists(biosItem.biosPath))
|
||||
// {
|
||||
// Logging.LogKey(Logging.LogType.Information, "process.collections", "collections.copying_bios_file", null, new string[] { biosItem.filename });
|
||||
// File.Copy(biosItem.biosPath, Path.Combine(ZipBiosPath, biosItem.filename), true);
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// // create platform directory
|
||||
// string ZipPlatformPath = "";
|
||||
// switch (collectionItem.FolderStructure)
|
||||
// {
|
||||
// case CollectionItem.FolderStructures.Gaseous:
|
||||
// ZipPlatformPath = Path.Combine(ZipFileTempPath, collectionPlatformItem.Slug);
|
||||
// break;
|
||||
|
||||
// case CollectionItem.FolderStructures.RetroPie:
|
||||
// try
|
||||
// {
|
||||
// PlatformMapping.PlatformMapItem platformMapItem = PlatformMapping.GetPlatformMap(collectionPlatformItem.Id).Result;
|
||||
// ZipPlatformPath = Path.Combine(ZipFileTempPath, "roms", platformMapItem.RetroPieDirectoryName);
|
||||
// }
|
||||
// catch
|
||||
// {
|
||||
// ZipPlatformPath = Path.Combine(ZipFileTempPath, collectionPlatformItem.Slug);
|
||||
// }
|
||||
|
||||
// break;
|
||||
|
||||
// }
|
||||
// if (!Directory.Exists(ZipPlatformPath))
|
||||
// {
|
||||
// Directory.CreateDirectory(ZipPlatformPath);
|
||||
// }
|
||||
|
||||
// foreach (CollectionContents.CollectionPlatformItem.CollectionGameItem collectionGameItem in collectionPlatformItem.Games)
|
||||
// {
|
||||
// bool includeGame = false;
|
||||
// if (collectionGameItem.InclusionStatus == null)
|
||||
// {
|
||||
// includeGame = true;
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// if (collectionGameItem.InclusionStatus.InclusionState == CollectionItem.AlwaysIncludeStatus.AlwaysInclude)
|
||||
// {
|
||||
// includeGame = true;
|
||||
// }
|
||||
// }
|
||||
|
||||
// if (includeGame == true)
|
||||
// {
|
||||
// string ZipGamePath = "";
|
||||
// switch (collectionItem.FolderStructure)
|
||||
// {
|
||||
// case CollectionItem.FolderStructures.Gaseous:
|
||||
// // create game directory
|
||||
// ZipGamePath = Path.Combine(ZipPlatformPath, collectionGameItem.Slug);
|
||||
// if (!Directory.Exists(ZipGamePath))
|
||||
// {
|
||||
// Directory.CreateDirectory(ZipGamePath);
|
||||
// }
|
||||
// break;
|
||||
|
||||
// case CollectionItem.FolderStructures.RetroPie:
|
||||
// ZipGamePath = ZipPlatformPath;
|
||||
// break;
|
||||
// }
|
||||
|
||||
// // copy in roms
|
||||
// foreach (Roms.GameRomItem gameRomItem in collectionGameItem.Roms)
|
||||
// {
|
||||
// if (File.Exists(gameRomItem.Path))
|
||||
// {
|
||||
// Logging.LogKey(Logging.LogType.Information, "process.collections", "collections.copying_rom", null, new string[] { gameRomItem.Name });
|
||||
// File.Copy(gameRomItem.Path, Path.Combine(ZipGamePath, gameRomItem.Name), true);
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// // compress to zip
|
||||
// Logging.LogKey(Logging.LogType.Information, "process.collections", "collections.compressing_collection");
|
||||
// switch (collectionItem.ArchiveType)
|
||||
// {
|
||||
// case CollectionItem.ArchiveTypes.Zip:
|
||||
// ZipFile.CreateFromDirectory(ZipFileTempPath, ZipFilePath, CompressionLevel.SmallestSize, false);
|
||||
// break;
|
||||
|
||||
// case CollectionItem.ArchiveTypes.RAR:
|
||||
|
||||
// break;
|
||||
|
||||
// case CollectionItem.ArchiveTypes.SevenZip:
|
||||
|
||||
// break;
|
||||
// }
|
||||
|
||||
|
||||
// // clean up
|
||||
// if (Directory.Exists(ZipFileTempPath))
|
||||
// {
|
||||
// Logging.LogKey(Logging.LogType.Information, "process.collections", "collections.cleaning_up");
|
||||
// Directory.Delete(ZipFileTempPath, true);
|
||||
// }
|
||||
|
||||
// // set completed
|
||||
// dbDict["bs"] = CollectionItem.CollectionBuildStatus.Completed;
|
||||
// db.ExecuteCMD(sql, dbDict);
|
||||
// }
|
||||
// catch (Exception ex)
|
||||
// {
|
||||
// // clean up
|
||||
// if (Directory.Exists(ZipFileTempPath))
|
||||
// {
|
||||
// Directory.Delete(ZipFileTempPath, true);
|
||||
// }
|
||||
|
||||
// if (File.Exists(ZipFilePath))
|
||||
// {
|
||||
// File.Delete(ZipFilePath);
|
||||
// }
|
||||
|
||||
// // set failed
|
||||
// dbDict["bs"] = CollectionItem.CollectionBuildStatus.Failed;
|
||||
// db.ExecuteCMD(sql, dbDict);
|
||||
|
||||
// Logging.LogKey(Logging.LogType.Critical, "process.collection_builder", "collections.build_failed", null, null, ex);
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// private static CollectionItem BuildCollectionItem(DataRow row)
|
||||
// {
|
||||
// string strPlatforms = (string)Common.ReturnValueIfNull(row["Platforms"], "[ ]");
|
||||
// string strGenres = (string)Common.ReturnValueIfNull(row["Genres"], "[ ]");
|
||||
// string strPlayers = (string)Common.ReturnValueIfNull(row["Players"], "[ ]");
|
||||
// string strPlayerPerspectives = (string)Common.ReturnValueIfNull(row["PlayerPerspectives"], "[ ]");
|
||||
// string strThemes = (string)Common.ReturnValueIfNull(row["Themes"], "[ ]");
|
||||
// string strAlwaysInclude = (string)Common.ReturnValueIfNull(row["AlwaysInclude"], "[ ]");
|
||||
|
||||
// CollectionItem item = new CollectionItem();
|
||||
// item.Id = (long)row["Id"];
|
||||
// item.Name = (string)row["Name"];
|
||||
// item.Description = (string)row["Description"];
|
||||
// item.Platforms = Newtonsoft.Json.JsonConvert.DeserializeObject<List<long>>(strPlatforms);
|
||||
// item.Genres = Newtonsoft.Json.JsonConvert.DeserializeObject<List<long>>(strGenres);
|
||||
// item.Players = Newtonsoft.Json.JsonConvert.DeserializeObject<List<long>>(strPlayers);
|
||||
// item.PlayerPerspectives = Newtonsoft.Json.JsonConvert.DeserializeObject<List<long>>(strPlayerPerspectives);
|
||||
// item.Themes = Newtonsoft.Json.JsonConvert.DeserializeObject<List<long>>(strThemes);
|
||||
// item.MinimumRating = (int)Common.ReturnValueIfNull(row["MinimumRating"], -1);
|
||||
// item.MaximumRating = (int)Common.ReturnValueIfNull(row["MaximumRating"], -1);
|
||||
// item.MaximumRomsPerPlatform = (int)Common.ReturnValueIfNull(row["MaximumRomsPerPlatform"], (int)-1);
|
||||
// item.MaximumBytesPerPlatform = (long)Common.ReturnValueIfNull(row["MaximumBytesPerPlatform"], (long)-1);
|
||||
// item.MaximumCollectionSizeInBytes = (long)Common.ReturnValueIfNull(row["MaximumCollectionSizeInBytes"], (long)-1);
|
||||
// item.FolderStructure = (CollectionItem.FolderStructures)(int)Common.ReturnValueIfNull(row["FolderStructure"], 0);
|
||||
// item.IncludeBIOSFiles = (bool)row["IncludeBIOSFiles"];
|
||||
// item.ArchiveType = (CollectionItem.ArchiveTypes)(int)Common.ReturnValueIfNull(row["ArchiveType"], 0);
|
||||
// item.AlwaysInclude = Newtonsoft.Json.JsonConvert.DeserializeObject<List<CollectionItem.AlwaysIncludeItem>>(strAlwaysInclude);
|
||||
// item.BuildStatus = (CollectionItem.CollectionBuildStatus)(int)Common.ReturnValueIfNull(row["BuiltStatus"], 0);
|
||||
|
||||
// return item;
|
||||
// }
|
||||
|
||||
// public class CollectionItem
|
||||
// {
|
||||
// public CollectionItem()
|
||||
// {
|
||||
|
||||
// }
|
||||
|
||||
// public long Id { get; set; }
|
||||
// public string Name { get; set; }
|
||||
// public string Description { get; set; }
|
||||
// public List<long>? Platforms { get; set; }
|
||||
// public List<long>? Genres { get; set; }
|
||||
// public List<long>? Players { get; set; }
|
||||
// public List<long>? PlayerPerspectives { get; set; }
|
||||
// public List<long>? Themes { get; set; }
|
||||
// public int MinimumRating { get; set; }
|
||||
// public int MaximumRating { get; set; }
|
||||
// public int? MaximumRomsPerPlatform { get; set; }
|
||||
// public long? MaximumBytesPerPlatform { get; set; }
|
||||
// public long? MaximumCollectionSizeInBytes { get; set; }
|
||||
// public FolderStructures FolderStructure { get; set; } = FolderStructures.Gaseous;
|
||||
// public bool IncludeBIOSFiles { get; set; } = true;
|
||||
// public ArchiveTypes ArchiveType { get; set; } = CollectionItem.ArchiveTypes.Zip;
|
||||
// public string ArchiveExtension
|
||||
// {
|
||||
// get
|
||||
// {
|
||||
// if (ArchiveType != null)
|
||||
// {
|
||||
// switch (ArchiveType)
|
||||
// {
|
||||
// case ArchiveTypes.Zip:
|
||||
// default:
|
||||
// return ".zip";
|
||||
|
||||
// case ArchiveTypes.RAR:
|
||||
// return ".rar";
|
||||
|
||||
// case ArchiveTypes.SevenZip:
|
||||
// return ".7z";
|
||||
// }
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// return ".zip";
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// public List<AlwaysIncludeItem> AlwaysInclude { get; set; }
|
||||
|
||||
// [JsonIgnore]
|
||||
// public CollectionBuildStatus BuildStatus
|
||||
// {
|
||||
// get
|
||||
// {
|
||||
// if (_BuildStatus == CollectionBuildStatus.Completed)
|
||||
// {
|
||||
// if (File.Exists(Path.Combine(Config.LibraryConfiguration.LibraryCollectionsDirectory, Id + ArchiveExtension)))
|
||||
// {
|
||||
// return CollectionBuildStatus.Completed;
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// return CollectionBuildStatus.NoStatus;
|
||||
// }
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// return _BuildStatus;
|
||||
// }
|
||||
// }
|
||||
// set
|
||||
// {
|
||||
// _BuildStatus = value;
|
||||
// }
|
||||
// }
|
||||
// private CollectionBuildStatus _BuildStatus { get; set; }
|
||||
|
||||
// [JsonIgnore]
|
||||
// public long CollectionBuiltSizeBytes
|
||||
// {
|
||||
// get
|
||||
// {
|
||||
// if (BuildStatus == CollectionBuildStatus.Completed)
|
||||
// {
|
||||
// string ZipFilePath = Path.Combine(Config.LibraryConfiguration.LibraryCollectionsDirectory, Id + ArchiveExtension);
|
||||
// if (File.Exists(ZipFilePath))
|
||||
// {
|
||||
// FileInfo fi = new FileInfo(ZipFilePath);
|
||||
// return fi.Length;
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// return 0;
|
||||
// }
|
||||
// }
|
||||
// else
|
||||
// {
|
||||
// return 0;
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// public enum CollectionBuildStatus
|
||||
// {
|
||||
// NoStatus = 0,
|
||||
// WaitingForBuild = 1,
|
||||
// Building = 2,
|
||||
// Completed = 3,
|
||||
// Failed = 4
|
||||
// }
|
||||
|
||||
// public enum FolderStructures
|
||||
// {
|
||||
// Gaseous = 0,
|
||||
// RetroPie = 1
|
||||
// }
|
||||
|
||||
// public enum ArchiveTypes
|
||||
// {
|
||||
// Zip = 0,
|
||||
// RAR = 1,
|
||||
// SevenZip = 2
|
||||
// }
|
||||
|
||||
// public class AlwaysIncludeItem
|
||||
// {
|
||||
// public long PlatformId { get; set; }
|
||||
// public long GameId { get; set; }
|
||||
// public AlwaysIncludeStatus InclusionState { get; set; }
|
||||
// }
|
||||
|
||||
// public enum AlwaysIncludeStatus
|
||||
// {
|
||||
// None = 0,
|
||||
// AlwaysInclude = 1,
|
||||
// AlwaysExclude = 2
|
||||
// }
|
||||
// }
|
||||
|
||||
// public class CollectionContents
|
||||
// {
|
||||
// [JsonIgnore]
|
||||
// public List<CollectionPlatformItem> Collection { get; set; }
|
||||
|
||||
// [JsonIgnore]
|
||||
// public long CollectionProjectedSizeBytes
|
||||
// {
|
||||
// get
|
||||
// {
|
||||
// long CollectionSize = 0;
|
||||
|
||||
// List<CollectionPlatformItem> collectionPlatformItems = new List<CollectionPlatformItem>();
|
||||
|
||||
// if (Collection != null)
|
||||
// {
|
||||
// collectionPlatformItems = Collection;
|
||||
// }
|
||||
|
||||
// foreach (CollectionPlatformItem platformItem in collectionPlatformItems)
|
||||
// {
|
||||
// CollectionSize += platformItem.RomSize;
|
||||
// }
|
||||
|
||||
// return CollectionSize;
|
||||
// }
|
||||
// }
|
||||
|
||||
// public AgeGroups.AgeRestrictionGroupings AgeGroup { get; set; }
|
||||
// public bool ContainsUnclassifiedAgeGroup { get; set; }
|
||||
|
||||
// public class CollectionPlatformItem
|
||||
// {
|
||||
// public CollectionPlatformItem(Platform platform)
|
||||
// {
|
||||
// string[] PropertyWhitelist = new string[] { "Id", "Name", "Slug" };
|
||||
|
||||
// PropertyInfo[] srcProperties = typeof(Platform).GetProperties();
|
||||
// PropertyInfo[] dstProperties = typeof(CollectionPlatformItem).GetProperties();
|
||||
// foreach (PropertyInfo srcProperty in srcProperties)
|
||||
// {
|
||||
// if (PropertyWhitelist.Contains<string>(srcProperty.Name))
|
||||
// {
|
||||
// foreach (PropertyInfo dstProperty in dstProperties)
|
||||
// {
|
||||
// if (srcProperty.Name == dstProperty.Name)
|
||||
// {
|
||||
// dstProperty.SetValue(this, srcProperty.GetValue(platform));
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// public long Id { get; set; }
|
||||
// public string Name { get; set; }
|
||||
// public string Slug { get; set; }
|
||||
|
||||
// public List<CollectionGameItem> Games { get; set; }
|
||||
|
||||
// public int RomCount
|
||||
// {
|
||||
// get
|
||||
// {
|
||||
// int Counter = 0;
|
||||
// foreach (CollectionGameItem Game in Games)
|
||||
// {
|
||||
// Counter += 1;
|
||||
// }
|
||||
|
||||
// return Counter;
|
||||
// }
|
||||
// }
|
||||
|
||||
// public long RomSize
|
||||
// {
|
||||
// get
|
||||
// {
|
||||
// long Size = 0;
|
||||
// foreach (CollectionGameItem Game in Games)
|
||||
// {
|
||||
// foreach (Roms.GameRomItem Rom in Game.Roms)
|
||||
// {
|
||||
// Size += (long)Rom.Size;
|
||||
// }
|
||||
// }
|
||||
|
||||
// return Size;
|
||||
// }
|
||||
// }
|
||||
|
||||
// public class CollectionGameItem : MinimalGameItem
|
||||
// {
|
||||
// public CollectionGameItem(MinimalGameItem gameObject)
|
||||
// {
|
||||
// this.Id = gameObject.Id;
|
||||
// this.Name = gameObject.Name;
|
||||
// this.Slug = gameObject.Slug;
|
||||
// this.TotalRating = gameObject.TotalRating;
|
||||
// this.TotalRatingCount = gameObject.TotalRatingCount;
|
||||
// this.Cover = gameObject.Cover;
|
||||
// this.Artworks = gameObject.Artworks;
|
||||
// this.FirstReleaseDate = gameObject.FirstReleaseDate;
|
||||
// this.AgeRatings = gameObject.AgeRatings;
|
||||
// }
|
||||
|
||||
// public AgeGroups.AgeRestrictionGroupings AgeGrouping
|
||||
// {
|
||||
// get
|
||||
// {
|
||||
// List<AgeRating> gameAgeRatings = this.AgeRatings.Select(s => (AgeRating)s).ToList();
|
||||
// return AgeGroups.GetAgeGroupFromAgeRatings(gameAgeRatings);
|
||||
// }
|
||||
// }
|
||||
|
||||
// public CollectionItem.AlwaysIncludeItem InclusionStatus { get; set; }
|
||||
|
||||
// public List<Roms.GameRomItem> Roms { get; set; }
|
||||
|
||||
// public long RomSize
|
||||
// {
|
||||
// get
|
||||
// {
|
||||
// long Size = 0;
|
||||
// foreach (Roms.GameRomItem Rom in Roms)
|
||||
// {
|
||||
// Size += (long)Rom.Size;
|
||||
// }
|
||||
|
||||
// return Size;
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
538
gaseous-lib/Classes/Common.cs
Normal file
538
gaseous-lib/Classes/Common.cs
Normal file
|
|
@ -0,0 +1,538 @@
|
|||
using System.Collections.Concurrent;
|
||||
using System.ComponentModel;
|
||||
using System.Data;
|
||||
using System.Drawing;
|
||||
using System.IO.Compression;
|
||||
using System.Reflection;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.RegularExpressions;
|
||||
using static gaseous_server.Classes.Plugins.PluginManagement.ImageResize;
|
||||
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
public static class Common
|
||||
{
|
||||
/// <summary>
/// Returns <paramref name="IfNullValue"/> when <paramref name="ObjectToCheck"/>
/// is null or a database NULL; otherwise returns the object unchanged.
/// </summary>
/// <param name="ObjectToCheck">Any nullable object to test.</param>
/// <param name="IfNullValue">Fallback value returned when the object is null or DBNull.</param>
/// <returns>Either <paramref name="ObjectToCheck"/> or <paramref name="IfNullValue"/>.</returns>
static public object ReturnValueIfNull(object? ObjectToCheck, object IfNullValue)
{
    // DBNull.Value (ADO.NET's database NULL) is treated the same as a CLR null.
    bool isMissing = ObjectToCheck is null || ObjectToCheck == System.DBNull.Value;
    return isMissing ? IfNullValue : ObjectToCheck;
}
|
||||
|
||||
/// <summary>
/// Converts a Unix timestamp (seconds since 1970-01-01T00:00:00Z) to a
/// <see cref="DateTime"/> expressed in local time.
/// </summary>
/// <param name="UnixTimeStamp">Seconds since the Unix epoch.</param>
/// <returns>The corresponding local-time DateTime.</returns>
static public DateTime ConvertUnixToDateTime(double UnixTimeStamp)
{
    // Anchor at the Unix epoch in UTC, add the offset, then shift to local time.
    DateTime epoch = new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc);
    return epoch.AddSeconds(UnixTimeStamp).ToLocalTime();
}
|
||||
|
||||
/// <summary>
/// Strips bracketed tags (e.g. "(USA)", "[!]", "{beta}") and trailing version
/// designators (e.g. "v1.2", "Rev 3", "Service Pack 1") from a file name.
/// </summary>
/// <param name="fileName">The file name to clean.</param>
/// <returns>The cleaned, trimmed file name.</returns>
public static string StripVersionsFromFileName(string fileName)
{
    // Strip anything in brackets, braces or parentheses (non-greedy, so
    // multiple tags are each removed).
    fileName = Regex.Replace(fileName, @"\[.*?\]", "").Trim();
    fileName = Regex.Replace(fileName, @"\{.*?\}", "").Trim();
    fileName = Regex.Replace(fileName, @"\(.*?\)", "").Trim();

    // All version designators share the same numeric tail ("1", "1.2",
    // "1.2.3", or a trailing "*"), anchored to the end of the name. The
    // previous implementation repeated the pattern twelve times; the prefixes
    // are data, so iterate instead. Order is preserved from the original.
    const string versionTail = @"(\d+\.)?(\d+\.)?(\*|\d+)$";
    string[] versionPrefixes =
    {
        "v", "Rev ", "Revision ", "Release ", "Build ", "Beta ",
        "Alpha ", "RC ", "SP ", "Service Pack ", "Set "
    };
    foreach (string prefix in versionPrefixes)
    {
        fileName = Regex.Replace(fileName, prefix + versionTail, "").Trim();
    }

    return fileName;
}
|
||||
|
||||
/// <summary>
/// Recursively computes the total size, in bytes, of all files beneath the
/// given directory.
/// </summary>
/// <param name="d">The root directory to measure.</param>
/// <returns>The combined length of every file in the tree.</returns>
public static long DirSize(DirectoryInfo d)
{
    long total = 0;

    // Walk the immediate children once; files contribute their length and
    // subdirectories are handled by recursing into this same method.
    foreach (FileSystemInfo entry in d.GetFileSystemInfos())
    {
        if (entry is FileInfo file)
        {
            total += file.Length;
        }
        else if (entry is DirectoryInfo subDirectory)
        {
            total += DirSize(subDirectory);
        }
    }

    return total;
}
|
||||
|
||||
// File names that are OS metadata artefacts (macOS Finder, Windows Explorer)
// and should be ignored when scanning library directories.
public static string[] SkippableFiles = {
    ".DS_STORE",
    "desktop.ini"
};
|
||||
|
||||
/// <summary>
/// Canonicalises a path: resolves it through <see cref="Uri"/> and
/// <see cref="Path.GetFullPath(string)"/>, then removes any trailing directory
/// separators so equivalent paths compare equal.
/// </summary>
/// <param name="path">An absolute path to normalise.</param>
/// <returns>The normalised path without trailing separators.</returns>
public static string NormalizePath(string path)
{
    string fullPath = Path.GetFullPath(new Uri(path).LocalPath);
    return fullPath.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
}
|
||||
|
||||
/// <summary>
/// Returns the characters that are not permitted in file names: the quote,
/// angle brackets, pipe, NUL, all ASCII control characters (1-31), and the
/// reserved path punctuation used by Windows/POSIX.
/// </summary>
public static char[] GetInvalidFileNameChars()
{
    var invalid = new List<char> { '\"', '<', '>', '|', '\0' };

    // ASCII control characters 1 through 31.
    for (int code = 1; code <= 31; code++)
    {
        invalid.Add((char)code);
    }

    invalid.AddRange(new[] { ':', '*', '?', '\\', '/' });
    return invalid.ToArray();
}
|
||||
|
||||
/// <summary>
/// Returns the <see cref="DescriptionAttribute"/> text attached to an enum
/// member, falling back to the member's name when no attribute is present.
/// </summary>
/// <param name="value">The enum member to describe.</param>
public static string GetDescription(this Enum value)
{
    // Locate the enum field whose value matches, then read its [Description].
    FieldInfo field = value.GetType()
        .GetFields(BindingFlags.Public | BindingFlags.Static)
        .Single(x => x.GetValue(null).Equals(value));

    Attribute attribute = Attribute.GetCustomAttribute(field, typeof(DescriptionAttribute));
    return attribute is DescriptionAttribute description
        ? description.Description ?? value.ToString()
        : value.ToString();
}
|
||||
|
||||
/// <summary>
/// Reads the width/height from the <c>ResolutionAttribute</c> attached to an
/// enum member and returns it as a <see cref="Point"/> (X = width, Y = height).
/// </summary>
/// <param name="value">The enum member carrying the resolution attribute.</param>
/// <returns>A point whose X/Y are the attribute's width/height.</returns>
/// <remarks>
/// If the member has no ResolutionAttribute, the fallback is the member's
/// name, which will fail <see cref="int.Parse(string)"/> — the same behaviour
/// as before, but the reflection lookup now happens once instead of twice.
/// </remarks>
public static Point GetResolution(this Enum value)
{
    // Locate the enum field whose value matches, then read its attribute once.
    FieldInfo field = value.GetType()
        .GetFields(BindingFlags.Public | BindingFlags.Static)
        .Single(x => x.GetValue(null).Equals(value));

    ResolutionAttribute attribute =
        (ResolutionAttribute)Attribute.GetCustomAttribute(field, typeof(ResolutionAttribute));

    string width = attribute?.width.ToString() ?? value.ToString();
    string height = attribute?.height.ToString() ?? value.ToString();

    return new Point(int.Parse(width), int.Parse(height));
}
|
||||
|
||||
/// <summary>
/// Determines whether a type is a nullable enum (e.g. <c>MyEnum?</c>).
/// </summary>
/// <param name="t">The type to inspect.</param>
/// <returns>True when the type is <c>Nullable&lt;T&gt;</c> with an enum T.</returns>
public static bool IsNullableEnum(this Type t)
{
    // Non-null underlying type means t is Nullable<T>; then check T is an enum.
    return Nullable.GetUnderlyingType(t) is { IsEnum: true };
}
|
||||
|
||||
// compression
/// <summary>
/// Compresses a byte array using the Deflate algorithm at optimal level.
/// </summary>
/// <param name="data">The raw bytes to compress.</param>
/// <returns>The Deflate-compressed bytes.</returns>
public static byte[] Compress(byte[] data)
{
    MemoryStream compressed = new MemoryStream();

    // The DeflateStream must be disposed before reading the buffer so the
    // final compressed block is flushed into the MemoryStream.
    using (DeflateStream deflate = new DeflateStream(compressed, CompressionLevel.Optimal))
    {
        deflate.Write(data, 0, data.Length);
    }

    return compressed.ToArray();
}
|
||||
|
||||
/// <summary>
/// Decompresses a Deflate-compressed byte array (inverse of <c>Compress</c>).
/// </summary>
/// <param name="data">Deflate-compressed bytes.</param>
/// <returns>The original uncompressed bytes.</returns>
public static byte[] Decompress(byte[] data)
{
    MemoryStream source = new MemoryStream(data);
    MemoryStream destination = new MemoryStream();

    using (DeflateStream inflate = new DeflateStream(source, CompressionMode.Decompress))
    {
        inflate.CopyTo(destination);
    }

    return destination.ToArray();
}
|
||||
|
||||
/// <summary>
/// Gets an environment variable's value, or a default when it is unset or empty.
/// </summary>
/// <param name="envName">Name of the environment variable to read.</param>
/// <param name="defaultValue">Value returned when the variable is unset or empty.</param>
/// <returns>The variable's value, or <paramref name="defaultValue"/>.</returns>
public static object GetEnvVar(string envName, string defaultValue)
{
    // Read once: the previous implementation queried the environment twice,
    // which was redundant and could race with a concurrent change.
    string? value = Environment.GetEnvironmentVariable(envName);
    return String.IsNullOrEmpty(value) ? defaultValue : value;
}
|
||||
|
||||
/// <summary>
/// Looks up the Id in the corresponding Lookup table (e.g. LookupCountry) for
/// a given code.
/// </summary>
/// <param name="LookupType">Which lookup table to query.</param>
/// <param name="Code">The code to search for.</param>
/// <returns>The matching row's Id, or -1 when no row matches.</returns>
public static int GetLookupByCode(LookupTypes LookupType, string Code)
{
    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);

    // The table name comes from the enum (not user input); the code value
    // itself is passed as a query parameter.
    string sql = "SELECT Id FROM Lookup" + LookupType.ToString() + " WHERE Code = @code";
    Dictionary<string, object> dbDict = new Dictionary<string, object>{
        { "code", Code }
    };

    DataTable data = db.ExecuteCMD(sql, dbDict);
    return data.Rows.Count == 0 ? -1 : (int)data.Rows[0]["Id"];
}
|
||||
|
||||
/// <summary>
/// Looks up the Id in the corresponding Lookup table (e.g. LookupLanguage) for
/// a given value.
/// </summary>
/// <param name="LookupType">Which lookup table to query.</param>
/// <param name="Value">The value to search for.</param>
/// <returns>The matching row's Id, or -1 when no row matches.</returns>
public static int GetLookupByValue(LookupTypes LookupType, string Value)
{
    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);

    // The table name comes from the enum (not user input); the value itself
    // is passed as a query parameter.
    string sql = "SELECT Id FROM Lookup" + LookupType.ToString() + " WHERE Value = @value";
    Dictionary<string, object> dbDict = new Dictionary<string, object>{
        { "value", Value }
    };

    DataTable data = db.ExecuteCMD(sql, dbDict);
    return data.Rows.Count == 0 ? -1 : (int)data.Rows[0]["Id"];
}
|
||||
|
||||
// Lookup tables available to GetLookupByCode/GetLookupByValue; the enum name
// is appended to "Lookup" to form the table name (LookupCountry, LookupLanguage).
public enum LookupTypes
{
    Country,
    Language
}
|
||||
|
||||
/// <summary>
/// Conversion helpers between integers and Roman numerals, plus a search for
/// the first Roman numeral token in free text.
/// </summary>
public class RomanNumerals
{
    /// <summary>
    /// Converts an integer to its Roman numeral representation.
    /// </summary>
    /// <param name="number">The integer to convert (1-3999).</param>
    /// <returns>A string containing the Roman numeral.</returns>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when the value is outside 1-3999.</exception>
    public static string IntToRoman(int number)
    {
        if (number < 1 || number > 3999)
            throw new ArgumentOutOfRangeException(nameof(number), "Value must be in the range 1-3999.");

        // Value/symbol pairs in descending order; subtractive forms (CM, IX,
        // ...) are listed explicitly so a simple greedy loop is correct.
        (int Value, string Numeral)[] table =
        {
            (1000, "M"), (900, "CM"), (500, "D"), (400, "CD"),
            (100, "C"), (90, "XC"), (50, "L"), (40, "XL"),
            (10, "X"), (9, "IX"), (5, "V"), (4, "IV"), (1, "I")
        };

        var builder = new System.Text.StringBuilder();
        foreach (var (value, numeral) in table)
        {
            while (number >= value)
            {
                builder.Append(numeral);
                number -= value;
            }
        }
        return builder.ToString();
    }

    /// <summary>
    /// Finds the first Roman numeral in a string.
    /// </summary>
    /// <param name="input">The input string to search.</param>
    /// <returns>The first Roman numeral found (upper-cased), or null if none found.</returns>
    public static string? FindFirstRomanNumeral(string input)
    {
        if (string.IsNullOrEmpty(input))
            return null;

        // Every group in the pattern is optional, so it can produce empty
        // matches at word boundaries — those are filtered out below.
        foreach (Match candidate in Regex.Matches(input, @"\bM{0,3}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})\b", RegexOptions.IgnoreCase))
        {
            if (candidate.Success && candidate.Value.Length > 0)
                return candidate.Value.ToUpper();
        }

        return null;
    }

    /// <summary>
    /// Converts a Roman numeral string to its integer representation.
    /// </summary>
    /// <param name="roman">The Roman numeral string to convert (case-insensitive).</param>
    /// <returns>The integer representation of the Roman numeral.</returns>
    /// <exception cref="ArgumentException">Thrown for null/empty input or an invalid character.</exception>
    public static int RomanToInt(string roman)
    {
        if (string.IsNullOrEmpty(roman))
            throw new ArgumentException("Input cannot be null or empty.", nameof(roman));

        int total = 0;
        int previous = 0;

        foreach (char c in roman.ToUpper())
        {
            int current = c switch
            {
                'I' => 1,
                'V' => 5,
                'X' => 10,
                'L' => 50,
                'C' => 100,
                'D' => 500,
                'M' => 1000,
                _ => throw new ArgumentException($"Invalid Roman numeral character: {c}", nameof(roman))
            };

            // A smaller symbol before a larger one is subtractive (IV = 4);
            // the smaller value was already added, so subtract it twice.
            total += current > previous ? current - 2 * previous : current;
            previous = current;
        }

        return total;
    }
}
|
||||
|
||||
public class Numbers
|
||||
{
|
||||
private static readonly Dictionary<int, string> NumberWords = new Dictionary<int, string>
|
||||
{
|
||||
{ 0, "Zero" },
|
||||
{ 1, "One" },
|
||||
{ 2, "Two" },
|
||||
{ 3, "Three" },
|
||||
{ 4, "Four" },
|
||||
{ 5, "Five" },
|
||||
{ 6, "Six" },
|
||||
{ 7, "Seven" },
|
||||
{ 8, "Eight" },
|
||||
{ 9, "Nine" },
|
||||
{ 10, "Ten" },
|
||||
{ 11, "Eleven" },
|
||||
{ 12, "Twelve" },
|
||||
{ 13, "Thirteen" },
|
||||
{ 14, "Fourteen" },
|
||||
{ 15, "Fifteen" },
|
||||
{ 16, "Sixteen" },
|
||||
{ 17, "Seventeen" },
|
||||
{ 18, "Eighteen" },
|
||||
{ 19, "Nineteen" },
|
||||
{ 20, "Twenty" },
|
||||
{ 30, "Thirty" },
|
||||
{ 40, "Forty" },
|
||||
{ 50, "Fifty" },
|
||||
{ 60, "Sixty" },
|
||||
{ 70, "Seventy" },
|
||||
{ 80, "Eighty" },
|
||||
{ 90, "Ninety" },
|
||||
{ 100, "Hundred" },
|
||||
{ 1000, "Thousand" },
|
||||
{ 1000000, "Million" },
|
||||
{ 1000000000, "Billion" }
|
||||
};
|
||||
|
||||
private static readonly Dictionary<string, int> WordsToNumber = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
{ "Zero", 0 },
|
||||
{ "One", 1 },
|
||||
{ "Two", 2 },
|
||||
{ "Three", 3 },
|
||||
{ "Four", 4 },
|
||||
{ "Five", 5 },
|
||||
{ "Six", 6 },
|
||||
{ "Seven", 7 },
|
||||
{ "Eight", 8 },
|
||||
{ "Nine", 9 },
|
||||
{ "Ten", 10 },
|
||||
{ "Eleven", 11 },
|
||||
{ "Twelve", 12 },
|
||||
{ "Thirteen", 13 },
|
||||
{ "Fourteen", 14 },
|
||||
{ "Fifteen", 15 },
|
||||
{ "Sixteen", 16 },
|
||||
{ "Seventeen", 17 },
|
||||
{ "Eighteen", 18 },
|
||||
{ "Nineteen", 19 },
|
||||
{ "Twenty", 20 },
|
||||
{ "Thirty", 30 },
|
||||
{ "Forty", 40 },
|
||||
{ "Fifty", 50 },
|
||||
{ "Sixty", 60 },
|
||||
{ "Seventy", 70 },
|
||||
{ "Eighty", 80 },
|
||||
{ "Ninety", 90 },
|
||||
{ "Hundred", 100 },
|
||||
{ "Thousand", 1000 },
|
||||
{ "Million", 1000000 },
|
||||
{ "Billion", 1000000000 }
|
||||
};
|
||||
|
||||
/// <summary>
/// Converts a number to its English word representation.
/// </summary>
/// <param name="number">The number to convert (0 to 999,999,999).</param>
/// <returns>The English word representation of the number.</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="number"/> is outside 0-999,999,999.</exception>
public static string NumberToWords(int number)
{
    if (number < 0 || number > 999999999)
        throw new ArgumentOutOfRangeException(nameof(number), "Value must be in the range 0-999,999,999.");

    if (number == 0)
        return "Zero";

    // Exact dictionary matches (units, teens, tens and scale words) need no composition.
    if (NumberWords.TryGetValue(number, out var word))
        return word;

    List<string> parts = new List<string>();

    // Note: the range guard above caps input at 999,999,999, so the former
    // "Billion" segment could never execute and has been removed.

    // Millions
    int millions = number / 1000000;
    if (millions > 0)
    {
        parts.Add(NumberToWords(millions) + " Million");
        number %= 1000000;
    }

    // Thousands
    int thousands = number / 1000;
    if (thousands > 0)
    {
        parts.Add(NumberToWords(thousands) + " Thousand");
        number %= 1000;
    }

    // Hundreds (the quotient is 1-9, always a direct dictionary hit)
    int hundreds = number / 100;
    if (hundreds > 0)
    {
        parts.Add(NumberWords[hundreds] + " Hundred");
        number %= 100;
    }

    // Ones and Tens
    if (number > 0)
    {
        if (number < 20)
        {
            // 1-19 are single dictionary entries.
            parts.Add(NumberWords[number]);
        }
        else
        {
            int tens = number / 10;
            int ones = number % 10;
            string tensWord = NumberWords[tens * 10];
            parts.Add(ones > 0 ? tensWord + " " + NumberWords[ones] : tensWord);
        }
    }

    return string.Join(" ", parts);
}
|
||||
|
||||
/// <summary>
/// Converts English number words to an integer.
/// Handles written forms like "Twenty One", "One Hundred Thirty Four", etc.
/// </summary>
/// <param name="words">The English words representing a number.</param>
/// <returns>The integer representation, or null if conversion fails.</returns>
public static int? WordsToNumbers(string words)
{
    if (string.IsNullOrWhiteSpace(words))
        return null;

    // Normalize spacing and remove extra whitespace
    words = Regex.Replace(words.Trim(), @"\s+", " ");
    string[] tokens = words.Split(' ', StringSplitOptions.RemoveEmptyEntries);

    int result = 0;   // total of completed scale groups (thousands and above)
    int current = 0;  // the 0-999 group currently being accumulated

    foreach (string token in tokens)
    {
        if (!WordsToNumber.TryGetValue(token, out int value))
            return null; // Invalid token

        if (value >= 1000)
        {
            // A scale word ("Thousand"/"Million"/"Billion") closes the current
            // group: add "current x scale" to the running total.
            // Fix: the previous code folded the running total into `current`
            // and multiplied EVERYTHING by the new scale, which corrupted
            // inputs such as "One Million Two Thousand" (yielding
            // 1,000,002,000 instead of 1,002,000) and could overflow.
            result += current * value;
            current = 0;
        }
        else if (value == 100)
        {
            // "Hundred" multiplies the group accumulated so far (e.g. "Three Hundred").
            current *= value;
        }
        else
        {
            current += value;
        }
    }

    result += current;
    return result >= 0 ? result : null;
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Provides a way to set contextual data that flows with the call and
/// async context of a test or invocation.
/// </summary>
public static class CallContext
{
    // One AsyncLocal slot per name. The dictionary itself is shared across the
    // process, while each slot's value flows with the current async context.
    static ConcurrentDictionary<string, AsyncLocal<object>> state = new ConcurrentDictionary<string, AsyncLocal<object>>();

    /// <summary>
    /// Stores a given object and associates it with the specified name.
    /// </summary>
    /// <param name="name">The name with which to associate the new item in the call context.</param>
    /// <param name="data">The object to store in the call context.</param>
    public static void SetData(string name, object data)
    {
        AsyncLocal<object> slot = state.GetOrAdd(name, _ => new AsyncLocal<object>());
        slot.Value = data;
    }

    /// <summary>
    /// Retrieves an object with the specified name from the <see cref="CallContext"/>.
    /// </summary>
    /// <param name="name">The name of the item in the call context.</param>
    /// <returns>The object in the call context associated with the specified name, or <see langword="null"/> if not found.</returns>
    public static object GetData(string name)
    {
        if (state.TryGetValue(name, out AsyncLocal<object> slot))
        {
            return slot.Value;
        }
        return null;
    }
}
|
||||
}
|
||||
645
gaseous-lib/Classes/Config.cs
Normal file
645
gaseous-lib/Classes/Config.cs
Normal file
|
|
@ -0,0 +1,645 @@
|
|||
using System;
|
||||
using System.Data;
|
||||
using System.Globalization;
|
||||
using Newtonsoft.Json;
|
||||
using gaseous_server.Classes.Metadata;
|
||||
using NuGet.Common;
|
||||
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
/// <summary>
|
||||
/// Central static class for accessing configuration throughout the codebase. This class loads the configuration from the config file on initialization and provides static properties for accessing various configuration sections, as well as a method for updating/saving the config file when changes are made.
|
||||
/// </summary>
|
||||
public static class Config
|
||||
{
|
||||
// Backing store for all configuration sections; populated exactly once by the static constructor.
static ConfigFile _config;
|
||||
|
||||
/// <summary>
/// The folder where the configuration file and related files (logs, localisation) live.
/// Defaults to ".gaseous-server" under the user's profile folder; the
/// GASEOUS_CONFIG_PATH environment variable overrides it (useful for
/// services/containers). The config itself is config.json within this folder,
/// with a config.json.backup written when changes are saved.
/// </summary>
public static string ConfigurationPath
{
    get
    {
        // Environment override wins when present and non-blank.
        string overridePath = Environment.GetEnvironmentVariable("GASEOUS_CONFIG_PATH");
        return string.IsNullOrWhiteSpace(overridePath)
            ? Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".gaseous-server")
            : overridePath;
    }
}
|
||||
|
||||
/// <summary>
/// The port the web server (Kestrel) listens on. Default 5198. May be changed in
/// the config file; in a container the "webport" environment variable takes
/// precedence so the port can be set without editing the config file.
/// </summary>
public static int ServerPort
{
    get => _config.ServerPort;
    set => _config.ServerPort = value;
}
|
||||
|
||||
/// <summary>
/// The port used for local communication between the main server process and the
/// out-of-process task host (commands and status updates for long-running tasks).
/// Default 5197. May be changed in the config file; in a container the
/// "localcommsport" environment variable takes precedence.
/// </summary>
public static int LocalCommsPort
{
    get => _config.LocalCommsPort;
    set => _config.LocalCommsPort = value;
}
|
||||
|
||||
/// <summary>
/// The default language/locale the server uses for responses and localisation, as a
/// standard locale string (e.g. "en-US", "fr-FR"). Default "en-US". May be changed
/// in the config file; in a container the "serverlanguage" environment variable
/// takes precedence.
/// </summary>
public static string ServerLanguage
{
    get => _config.ServerLanguage;
    set => _config.ServerLanguage = value;
}
|
||||
|
||||
// Full path of config.json inside the configuration folder.
static string ConfigurationFilePath => Path.Combine(ConfigurationPath, "config.json");

// Backup of the previous config.json, rotated into place before each save.
static string ConfigurationFilePath_Backup => Path.Combine(ConfigurationPath, "config.json.backup");

// Full path of the "configuration.json" (version 2) file.
static string ConfigurationFilePath_Version2 => Path.Combine(ConfigurationPath, "configuration.json");

// Backup path for the "configuration.json" (version 2) file.
static string ConfigurationFilePath_Backup_Version2 => Path.Combine(ConfigurationPath, "configuration.json.backup");
|
||||
|
||||
#region Configuration Accessors
|
||||
// These provide easy access to the various configuration sections throughout the codebase without needing to reference the entire config object
|
||||
|
||||
/// <summary>
/// The database configuration section of the config: all settings needed to connect
/// to the database. Loaded from the config file when this class initializes.
/// </summary>
public static gaseous_server.Classes.Configuration.Models.Database DatabaseConfiguration => _config.DatabaseConfiguration;

/// <summary>
/// The library configuration section of the config: game library settings such as
/// file paths and naming templates. Loaded from the config file when this class initializes.
/// </summary>
public static gaseous_server.Classes.Configuration.Models.Library LibraryConfiguration => _config.LibraryConfiguration;

/// <summary>
/// The metadata API configuration section of the config: which metadata source to
/// use, API keys for external services, and related settings. Loaded from the
/// config file when this class initializes.
/// </summary>
public static gaseous_server.Classes.Configuration.Models.MetadataAPI MetadataConfiguration => _config.MetadataConfiguration;

/// <summary>
/// The IGDB configuration section of the config: IGDB API keys and whether to use
/// the Hasheous proxy. Loaded from the config file when this class initializes.
/// </summary>
public static gaseous_server.Classes.Configuration.Models.Providers.IGDB IGDB => _config.IGDBConfiguration;

/// <summary>
/// The social authentication configuration section of the config: Google,
/// Microsoft and OIDC provider settings, plus whether password login is allowed.
/// Loaded from the config file when this class initializes.
/// </summary>
public static gaseous_server.Classes.Configuration.Models.Security.SocialAuth SocialAuthConfiguration => _config.SocialAuthConfiguration;

/// <summary>
/// The reverse proxy configuration section of the config: known proxy IPs/networks
/// and whether to trust forwarded headers. Used to configure ASP.NET Core's
/// Forwarded Headers middleware (e.g. when running behind Nginx or in Docker).
/// Loaded from the config file when this class initializes.
/// </summary>
public static gaseous_server.Classes.Configuration.Models.Security.ReverseProxy ReverseProxyConfiguration => _config.ReverseProxyConfiguration;
|
||||
|
||||
/// <summary>
/// The folder where server log files are written: the "Logs" subfolder of
/// <see cref="ConfigurationPath"/>. The folder is created on first access if it
/// does not exist. (The previous doc comment incorrectly described this as the
/// logging configuration section.)
/// </summary>
public static string LogPath
{
    get
    {
        return EnsureDirectory(Path.Combine(ConfigurationPath, "Logs"));
    }
}

/// <summary>
/// The folder where localisation files are stored: the "Localisation" subfolder of
/// <see cref="ConfigurationPath"/>. The folder is created on first access if it
/// does not exist. (The previous doc comment incorrectly described this as the
/// localisation configuration section.)
/// </summary>
public static string LocalisationPath
{
    get
    {
        return EnsureDirectory(Path.Combine(ConfigurationPath, "Localisation"));
    }
}

// Creates the directory if it is missing and returns the same path unchanged.
// Shared by LogPath and LocalisationPath, which previously duplicated this logic.
private static string EnsureDirectory(string path)
{
    if (!Directory.Exists(path))
    {
        Directory.CreateDirectory(path);
    }
    return path;
}
|
||||
|
||||
/// <summary>
/// The full path of today's server log file inside <see cref="LogPath"/>, named
/// "Server Log [UTC date].txt" with the date formatted as yyyyMMdd (e.g.
/// "Server Log 20240101.txt"), so log files rotate daily.
/// </summary>
public static string LogFilePath
{
    get
    {
        // Read the UTC clock directly instead of converting local time
        // (DateTime.Now.ToUniversalTime()) - same value, clearer intent.
        return Path.Combine(LogPath, "Server Log " + DateTime.UtcNow.ToString("yyyyMMdd") + ".txt");
    }
}
|
||||
|
||||
/// <summary>
/// The logging configuration section of the config: log levels, whether to log to
/// file, and related settings. Loaded from the config file when this class initializes.
/// </summary>
public static gaseous_server.Classes.Configuration.Models.Logging LoggingConfiguration => _config.LoggingConfiguration;
|
||||
|
||||
/// <summary>
/// The first-run status, read from the database via <see cref="ReadSetting{T}"/>:
/// "0" = not first run (the default when unset), "1" = first run in progress,
/// "2" = first run completed. Drives whether the web interface shows the initial
/// setup page and whether first-run initialization tasks run.
/// </summary>
[JsonIgnore]
public static string FirstRunStatus => ReadSetting<string>("FirstRunStatus", "0");
|
||||
|
||||
/// <summary>
/// The value to write to the first-run status once initial setup completes.
/// Always "2", marking the server's first-run initialization as finished.
/// </summary>
public static string FirstRunStatusWhenSet => "2";
|
||||
|
||||
/// <summary>
/// Whether the background task processing system is enabled. When false, the timer
/// that processes the task queue does not execute any tasks. Defaults to false so
/// no background work runs until the server is fully up; it can also be left
/// disabled for troubleshooting or when background processing is not desired.
/// This is a runtime-only option and is not persisted to the config file.
/// </summary>
public static bool BackgroundTasksEnabled { get; set; } = false;
|
||||
|
||||
#endregion Configuration Accessors
|
||||
|
||||
/// <summary>
/// Static constructor. Loads the configuration from the config file the first time
/// any member of <see cref="Config"/> is accessed. When running in a Docker
/// container (INDOCKER=1), selected values are overridden from environment
/// variables so the container can be configured without editing the config file.
/// If no config file exists, a default configuration is created and saved.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when the config file exists but deserializes to null (invalid JSON).
/// Parsing malformed environment-variable values (ports, booleans, enum names)
/// may also surface exceptions; callers should handle initialization failures.
/// </exception>
static Config()
{
    // Note: a static constructor runs at most once, so the previous
    // "if (_config == null)" guard was always true and has been removed.
    if (File.Exists(ConfigurationFilePath))
    {
        // load the config file
        string configRaw = File.ReadAllText(ConfigurationFilePath);
        Newtonsoft.Json.JsonSerializerSettings serializerSettings = new Newtonsoft.Json.JsonSerializerSettings
        {
            NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
            MissingMemberHandling = Newtonsoft.Json.MissingMemberHandling.Ignore
        };
        ConfigFile? _tempConfig = Newtonsoft.Json.JsonConvert.DeserializeObject<ConfigFile>(configRaw, serializerSettings);
        if (_tempConfig == null)
        {
            throw new InvalidOperationException("There was an error reading the config file: Json returned null");
        }
        _config = _tempConfig;

        // load environment variables if we're in a docker container
        // (== "1" already implies non-null/non-empty, so no separate null check is needed)
        if (Environment.GetEnvironmentVariable("INDOCKER") == "1")
        {
            Console.WriteLine("Running in Docker - setting configuration from variables");
            _config.DatabaseConfiguration.HostName = (string)Common.GetEnvVar("dbhost", _config.DatabaseConfiguration.HostName);
            _config.DatabaseConfiguration.UserName = (string)Common.GetEnvVar("dbuser", _config.DatabaseConfiguration.UserName);
            _config.DatabaseConfiguration.Password = (string)Common.GetEnvVar("dbpass", _config.DatabaseConfiguration.Password);
            _config.DatabaseConfiguration.DatabaseName = (string)Common.GetEnvVar("dbname", _config.DatabaseConfiguration.DatabaseName);
            _config.DatabaseConfiguration.Port = int.Parse((string)Common.GetEnvVar("dbport", _config.DatabaseConfiguration.Port.ToString()));

            _config.MetadataConfiguration.DefaultMetadataSource = (FileSignature.MetadataSources)Enum.Parse(typeof(FileSignature.MetadataSources), (string)Common.GetEnvVar("metadatasource", _config.MetadataConfiguration.DefaultMetadataSource.ToString()));
            _config.IGDBConfiguration.UseHasheousProxy = bool.Parse((string)Common.GetEnvVar("metadatausehasheousproxy", _config.IGDBConfiguration.UseHasheousProxy.ToString()));
            _config.MetadataConfiguration.SignatureSource = (HasheousClient.Models.MetadataModel.SignatureSources)Enum.Parse(typeof(HasheousClient.Models.MetadataModel.SignatureSources), (string)Common.GetEnvVar("signaturesource", _config.MetadataConfiguration.SignatureSource.ToString()));
            _config.MetadataConfiguration.HasheousHost = (string)Common.GetEnvVar("hasheoushost", _config.MetadataConfiguration.HasheousHost);

            _config.IGDBConfiguration.ClientId = (string)Common.GetEnvVar("igdbclientid", _config.IGDBConfiguration.ClientId);
            _config.IGDBConfiguration.Secret = (string)Common.GetEnvVar("igdbclientsecret", _config.IGDBConfiguration.Secret);

            _config.SocialAuthConfiguration.PasswordLoginEnabled = bool.Parse((string)Common.GetEnvVar("passwordloginenabled", _config.SocialAuthConfiguration.PasswordLoginEnabled.ToString()));
            _config.SocialAuthConfiguration.GoogleClientId = (string)Common.GetEnvVar("googleclientid", _config.SocialAuthConfiguration.GoogleClientId);
            _config.SocialAuthConfiguration.GoogleClientSecret = (string)Common.GetEnvVar("googleclientsecret", _config.SocialAuthConfiguration.GoogleClientSecret);
            _config.SocialAuthConfiguration.MicrosoftClientId = (string)Common.GetEnvVar("microsoftclientid", _config.SocialAuthConfiguration.MicrosoftClientId);
            _config.SocialAuthConfiguration.MicrosoftClientSecret = (string)Common.GetEnvVar("microsoftclientsecret", _config.SocialAuthConfiguration.MicrosoftClientSecret);
            _config.SocialAuthConfiguration.OIDCAuthority = (string)Common.GetEnvVar("oidcauthority", _config.SocialAuthConfiguration.OIDCAuthority);
            _config.SocialAuthConfiguration.OIDCClientId = (string)Common.GetEnvVar("oidcclientid", _config.SocialAuthConfiguration.OIDCClientId);
            _config.SocialAuthConfiguration.OIDCClientSecret = (string)Common.GetEnvVar("oidcclientsecret", _config.SocialAuthConfiguration.OIDCClientSecret);

            // reverse proxy configuration (known proxies/networks)
            // Comma-separated IPs/CIDRs via env when running in Docker
            var knownProxiesEnv = (string)Common.GetEnvVar("knownproxies", string.Join(",", _config.ReverseProxyConfiguration.KnownProxies ?? new List<string>()));
            if (!string.IsNullOrWhiteSpace(knownProxiesEnv))
            {
                _config.ReverseProxyConfiguration.KnownProxies = knownProxiesEnv
                    .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
                    .ToList();
            }

            var knownNetworksEnv = (string)Common.GetEnvVar("knownnetworks", string.Join(",", _config.ReverseProxyConfiguration.KnownNetworks ?? new List<string>()));
            if (!string.IsNullOrWhiteSpace(knownNetworksEnv))
            {
                _config.ReverseProxyConfiguration.KnownNetworks = knownNetworksEnv
                    .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
                    .ToList();
            }
        }
    }
    else
    {
        // no config file!
        // use defaults and save
        _config = new ConfigFile();
        UpdateConfig();
    }
}
|
||||
|
||||
/// <summary>
/// Persists the current configuration to the config file. Serializes the in-memory
/// config to indented JSON (enums as strings, nulls omitted) and writes it to
/// <see cref="ConfigurationFilePath"/>. Before writing, the existing file is
/// rotated to a .backup copy so a previous good config can be restored if the new
/// one turns out to be invalid. Call this whenever configuration changes need to
/// be saved (admin UI updates or programmatic changes).
/// </summary>
public static void UpdateConfig()
{
    // Serialize the current configuration with enums written as their names.
    var serializerSettings = new Newtonsoft.Json.JsonSerializerSettings
    {
        NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
        Formatting = Newtonsoft.Json.Formatting.Indented
    };
    serializerSettings.Converters.Add(new Newtonsoft.Json.Converters.StringEnumConverter());
    string serialized = Newtonsoft.Json.JsonConvert.SerializeObject(_config, serializerSettings);

    if (!Directory.Exists(ConfigurationPath))
    {
        Directory.CreateDirectory(ConfigurationPath);
    }

    // Keep exactly one backup: drop any older backup, then rotate the current
    // file into the backup slot before writing the fresh config.
    if (File.Exists(ConfigurationFilePath_Backup))
    {
        File.Delete(ConfigurationFilePath_Backup);
    }
    if (File.Exists(ConfigurationFilePath))
    {
        File.Move(ConfigurationFilePath, ConfigurationFilePath_Backup);
    }

    File.WriteAllText(ConfigurationFilePath, serialized);
}
|
||||
|
||||
// In-memory cache of settings read from the database's Settings table, keyed by setting name.
private static Dictionary<string, object> AppSettings = new Dictionary<string, object>();
|
||||
|
||||
// Returns true when the requested setting type is DateTime (or Nullable<DateTime>),
// which determines whether the ValueDate column is used instead of Value.
private static bool IsDateSettingType(Type type)
{
    Type effective = Nullable.GetUnderlyingType(type) ?? type;
    return effective == typeof(DateTime);
}
|
||||
|
||||
// Converts a raw stored setting value (typically from the database or the cache)
// to the requested type T. Handles DBNull/null, DateTime round-trip parsing, and
// the integer representations databases use for booleans; everything else goes
// through Convert.ChangeType with the invariant culture.
private static T ConvertSettingValue<T>(object value)
{
    Type targetType = typeof(T);
    Type effectiveType = Nullable.GetUnderlyingType(targetType) ?? targetType;

    // Null (or database null) maps to default for reference/nullable targets and
    // is an error for non-nullable value types.
    if (value is null || value == DBNull.Value)
    {
        bool targetAcceptsNull = !targetType.IsValueType || Nullable.GetUnderlyingType(targetType) != null;
        if (targetAcceptsNull)
        {
            return default!;
        }

        throw new InvalidCastException($"Cannot convert a null setting value to {targetType.FullName}.");
    }

    // Already the requested type - no conversion needed.
    if (value is T alreadyTyped)
    {
        return alreadyTyped;
    }

    object converted;

    if (effectiveType == typeof(DateTime))
    {
        // Accept either a native DateTime or a round-trip-formatted string.
        converted = value is DateTime dateTimeValue
            ? dateTimeValue
            : DateTime.Parse(value.ToString() ?? string.Empty, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind | DateTimeStyles.AllowWhiteSpaces);
    }
    else if (effectiveType == typeof(bool))
    {
        // Integer-typed database columns represent booleans as zero/non-zero.
        converted = value switch
        {
            bool boolValue => boolValue,
            sbyte signedByteValue => signedByteValue != 0,
            byte byteValue => byteValue != 0,
            short shortValue => shortValue != 0,
            int intValue => intValue != 0,
            long longValue => longValue != 0,
            _ => bool.Parse(value.ToString() ?? string.Empty),
        };
    }
    else
    {
        converted = Convert.ChangeType(value, effectiveType, CultureInfo.InvariantCulture);
    }

    return (T)(object)converted;
}
|
||||
|
||||
// Picks the stored value out of a Settings row: ValueType 1 means the setting is
// a datetime (ValueDate column); anything else (including a DBNull ValueType)
// means a string-style setting stored in the Value column.
private static object GetStoredSettingValue(DataRow row)
{
    object rawType = row["ValueType"];
    int valueType = rawType == DBNull.Value
        ? 0
        : Convert.ToInt32(rawType, CultureInfo.InvariantCulture);

    if (valueType == 1)
    {
        return row["ValueDate"];
    }

    return row["Value"];
}
|
||||
|
||||
// Builds the parameter dictionary for writing a setting row: date-typed values go
// into ValueDate with ValueType 1, everything else into Value with ValueType 0.
// A null value is stored as DBNull in whichever column applies.
private static Dictionary<string, object> BuildSettingWriteParameters<T>(string settingName, T value)
{
    bool isDateValue = IsDateSettingType(typeof(T)) || value is DateTime;
    object storedValue = value is null ? DBNull.Value : (object)value;

    var parameters = new Dictionary<string, object>();
    parameters["SettingName"] = settingName;
    parameters["ValueType"] = isDateValue ? 1 : 0;
    parameters["Value"] = isDateValue ? DBNull.Value : storedValue;
    parameters["ValueDate"] = isDateValue ? storedValue : DBNull.Value;
    return parameters;
}
|
||||
|
||||
/// <summary>
/// Initializes the application settings cache. Settings are currently loaded
/// lazily, one at a time, by <see cref="ReadSetting{T}"/> on first access (which
/// also handles caching and resetting of broken values), so there is no eager
/// work to perform here. The method is retained so startup code that calls it
/// keeps working.
/// </summary>
public static void InitSettings()
{
    // Intentionally empty: settings are read on demand by ReadSetting<T>.
    // The previous eager bulk-load implementation was removed as dead
    // (fully commented-out) code; see version control history if needed.
}
|
||||
|
||||
/// <summary>
/// Reads a setting value, using the in-memory <c>AppSettings</c> cache first and
/// falling back to the database's Settings table. A setting missing from the
/// database is initialized to <paramref name="DefaultValue"/> (persisted via
/// <c>SetSetting</c>) and that default is returned. If the stored value cannot be
/// cast to <typeparamref name="T"/> (typically after a schema change during an
/// upgrade), the broken row is deleted, a warning is logged, and the default is
/// returned so the setting resets itself on next write.
/// </summary>
/// <typeparam name="T">The type of the setting value.</typeparam>
/// <param name="SettingName">The name of the setting to read.</param>
/// <param name="DefaultValue">The default value to return if the setting is not found.</param>
/// <returns>The value of the setting, or the default value if the setting is not found.</returns>
public static T ReadSetting<T>(string SettingName, T DefaultValue)
{
    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
    try
    {
        // Fast path: single cache lookup (previously ContainsKey + indexer).
        if (AppSettings.TryGetValue(SettingName, out object cachedValue))
        {
            return ConvertSettingValue<T>(cachedValue);
        }

        string sql;
        Dictionary<string, object> dbDict = new Dictionary<string, object>
        {
            { "SettingName", SettingName }
        };
        DataTable dbResponse;

        try
        {
            Logging.LogKey(Logging.LogType.Debug, "process.database", "database.reading_setting", null, new string[] { SettingName });

            sql = "SELECT ValueType, Value, ValueDate FROM Settings WHERE Setting = @SettingName";

            dbResponse = db.ExecuteCMD(sql, dbDict);
            if (dbResponse.Rows.Count == 0)
            {
                // no value with that name stored - persist and return the default value
                SetSetting<T>(SettingName, DefaultValue);
                return DefaultValue;
            }
            else
            {
                object storedValue = GetStoredSettingValue(dbResponse.Rows[0]);

                // Cache the raw stored value; conversion happens per-request so
                // an InvalidCastException can trigger the reset path below.
                AppSettings.Add(SettingName, storedValue);
                return ConvertSettingValue<T>(storedValue);
            }
        }
        catch (Exception ex)
        {
            Logging.LogKey(Logging.LogType.Critical, "process.database", "database.failed_reading_setting", null, new string[] { SettingName }, ex);
            throw;
        }
    }
    catch (InvalidCastException castEx)
    {
        Logging.LogKey(Logging.LogType.Warning, "process.settings", "settings.exception_when_reading_server_setting_resetting_to_default", null, new string[] { SettingName }, castEx);

        // delete broken setting and return the default
        // this error is probably generated during an upgrade
        if (AppSettings.ContainsKey(SettingName))
        {
            AppSettings.Remove(SettingName);
        }

        string sql = "DELETE FROM Settings WHERE Setting = @SettingName";
        Dictionary<string, object> dbDict = new Dictionary<string, object>
        {
            { "SettingName", SettingName }
        };
        db.ExecuteCMD(sql, dbDict);

        return DefaultValue;
    }
    catch (Exception ex)
    {
        Logging.LogKey(Logging.LogType.Critical, "process.settings", "settings.exception_when_reading_server_setting", null, new string[] { SettingName }, ex);
        throw;
    }
}
|
||||
|
||||
/// <summary>
/// Writes a setting value to the database and updates the AppSettings cache. The value is
/// persisted via REPLACE INTO the Settings table (parameters are produced by
/// BuildSettingWriteParameters), and the in-memory cache is updated only after the database
/// write succeeds. Any exception is logged and rethrown for the caller to handle.
/// </summary>
/// <typeparam name="T">The type of the setting value.</typeparam>
/// <param name="SettingName">The name of the setting to write.</param>
/// <param name="Value">The value to write to the setting.</param>
public static void SetSetting<T>(string SettingName, T Value)
{
    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
    // Nulls are cached as DBNull so the cached shape matches what the database stores.
    object storedValue = Value is null ? DBNull.Value : Value;
    string sql = "REPLACE INTO Settings (Setting, ValueType, Value, ValueDate) VALUES (@SettingName, @ValueType, @Value, @ValueDate)";
    Dictionary<string, object> dbDict = BuildSettingWriteParameters(SettingName, Value);

    Logging.LogKey(Logging.LogType.Debug, "process.database", "database.storing_setting_to_value", null, new string[] { SettingName, Value?.ToString() ?? "" });
    try
    {
        db.ExecuteCMD(sql, dbDict);

        // Add-or-update via the indexer replaces the original
        // ContainsKey + Add/assign double lookup.
        AppSettings[SettingName] = storedValue;
    }
    catch (Exception ex)
    {
        Logging.LogKey(Logging.LogType.Critical, "process.database", "database.failed_storing_setting", null, new string[] { SettingName }, ex);
        throw;
    }
}
|
||||
}
|
||||
}
|
||||
857
gaseous-lib/Classes/ContentManager.cs
Normal file
857
gaseous-lib/Classes/ContentManager.cs
Normal file
|
|
@ -0,0 +1,857 @@
|
|||
using System.Data;
|
||||
using System.Threading.Tasks;
|
||||
using gaseous_server.Models;
|
||||
using System.Diagnostics;
|
||||
using System.Globalization;
|
||||
using Microsoft.CodeAnalysis.CSharp.Syntax;
|
||||
|
||||
namespace gaseous_server.Classes.Content
|
||||
{
|
||||
/// <summary>
|
||||
/// Provides configuration and access to different types of user-submitted or managed content.
|
||||
/// </summary>
|
||||
public class ContentManager
|
||||
{
|
||||
private const string AttachmentIdParam = "@attachmentid";
|
||||
private const string RoleAdmin = "Admin";
|
||||
private const string SystemUserId = "System";
|
||||
/// <summary>
/// Enumerates the supported types of content.
/// NOTE: the numeric values are persisted in the MetadataMap_Attachments.AttachmentType
/// column (see AddMetadataItemContent), so existing members must never be renumbered.
/// </summary>
public enum ContentType
{
    /// <summary>Screenshots or still images.</summary>
    Screenshot = 0,
    /// <summary>Video content.</summary>
    Video = 1,
    /// <summary>Short audio sample content (e.g., previews, sound bites).</summary>
    AudioSample = 2,
    /// <summary>Manuals or documentation files. Available to all users.</summary>
    GlobalManual = 3,
    /// <summary>Photos, such as box art or promotional images.</summary>
    Photo = 4,
    /// <summary>Notes or text-based content.</summary>
    Note = 5,
    // Gap left intentionally before Misc so new specific types can be added ahead of it.
    /// <summary>Miscellaneous content not covered by other types.</summary>
    Misc = 100
}
|
||||
|
||||
/// <summary>
/// Configuration for a specific content type: platform restrictions, role restrictions,
/// ownership model, and shareability. Consumed by IsContentTypeUploadable and the
/// upload/delete paths.
/// </summary>
public class ContentConfiguration
{
    /// <summary>
    /// Optional list of platform IDs this content type is limited to; an empty list means no restriction.
    /// </summary>
    public List<long> LimitToPlatformIds { get; set; } = new List<long>(); // empty list means no limit

    /// <summary>
    /// Optional list of user roles allowed to manage this content type; an empty list means no role restriction.
    /// </summary>
    public List<string> AllowedRoles { get; set; } = new List<string>(); // empty list means no role restriction

    /// <summary>
    /// Defines if the content type is user or system managed. Default is user managed.
    /// If system managed, AllowedRoles must contain "Admin"; uploads are then stored under the System user.
    /// </summary>
    public bool IsUserManaged { get; set; } = true; // if false, AllowedRoles must contain "Admin"

    /// <summary>
    /// Defines if the content is shareable between users. Default is false.
    /// If true, content will only be viewable via an ACL check (not implemented yet).
    /// </summary>
    public bool IsShareable { get; set; } = false; // if true, content will only be viewable via an ACL check (not implemented yet)
}
|
||||
|
||||
// Static rule table mapping each content type to its configuration.
// Types absent from this table are treated as unrestricted by IsContentTypeUploadable.
private static readonly Dictionary<ContentType, ContentConfiguration> _contentConfigurations = new()
{
    { ContentType.Screenshot, new ContentConfiguration() {
        LimitToPlatformIds = new List<long>(),
        IsUserManaged = true,
        IsShareable = true
    } },
    { ContentType.Video, new ContentConfiguration() {
        LimitToPlatformIds = new List<long>(),
        IsUserManaged = true,
        IsShareable = true
    } },
    { ContentType.AudioSample, new ContentConfiguration() {
        // Restricted to platform id 52 — presumably a specific console; TODO confirm which platform this is.
        LimitToPlatformIds = new List<long>{ 52 },
        AllowedRoles = new List<string>{ RoleAdmin },
        IsUserManaged = false
    } },
    { ContentType.GlobalManual, new ContentConfiguration() {
        LimitToPlatformIds = new List<long>(),
        AllowedRoles = new List<string>{ RoleAdmin },
        IsUserManaged = false
    } },
    { ContentType.Photo, new ContentConfiguration() {
        LimitToPlatformIds = new List<long>(),
        IsUserManaged = true,
        IsShareable = true
    } },
    { ContentType.Note, new ContentConfiguration() {
        LimitToPlatformIds = new List<long>(),
        IsUserManaged = true,
        IsShareable = false
    } },
    { ContentType.Misc, new ContentConfiguration() {
        LimitToPlatformIds = new List<long>()
    } }
};

/// <summary>
/// Read-only access to the configured content type definitions.
/// </summary>
public static IReadOnlyDictionary<ContentType, ContentConfiguration> ContentConfigurations => _contentConfigurations;
|
||||
|
||||
/// <summary>
|
||||
/// Checks if a given content type is allowed to be uploaded. If no rule is defned, it is allowed by default.
|
||||
/// </summary>
|
||||
/// <param name="contentType">The content type to check.</param>
|
||||
/// <param name="platformId">Optional platform ID to check against platform restrictions.</param>
|
||||
/// <param name="userRoles">Optional list of user roles to check against role restrictions.</param>
|
||||
/// <returns>True if the content type is allowed under the given conditions; otherwise, false.</returns>
|
||||
/// <summary>
/// Checks whether a content type may be uploaded under the supplied conditions.
/// A type with no registered configuration is allowed by default.
/// </summary>
/// <param name="contentType">The content type to check.</param>
/// <param name="platformId">Optional platform ID to check against platform restrictions.</param>
/// <param name="userRoles">Optional list of user roles to check against role restrictions.</param>
/// <returns>True when the upload is permitted; otherwise false.</returns>
private static bool IsContentTypeUploadable(ContentType contentType, long? platformId = null, List<string>? userRoles = null)
{
    // No configuration registered for this type means no restrictions apply.
    if (!_contentConfigurations.TryGetValue(contentType, out var config))
    {
        return true;
    }

    // Platform restriction: only enforced when a platform was supplied AND the
    // configuration actually names a platform allow-list.
    bool platformBlocked = platformId.HasValue
        && config.LimitToPlatformIds.Any()
        && !config.LimitToPlatformIds.Contains(platformId.Value);
    if (platformBlocked)
    {
        return false;
    }

    // Role restriction: the caller must hold at least one of the allowed roles.
    if (config.AllowedRoles.Any())
    {
        return userRoles != null && userRoles.Intersect(config.AllowedRoles).Any();
    }

    return true;
}
|
||||
|
||||
/// <summary>
/// Adds user-submitted content associated with a metadata item, saving the file and recording metadata in the database.
/// </summary>
/// <param name="metadataId">The ID of the metadata item to associate the content with.</param>
/// <param name="contentModel">The content model containing the content data and metadata.</param>
/// <param name="user">The user submitting the content. Required: a null user is rejected. For system-managed content types the user must hold the Admin role and the upload is stored under the System user.</param>
/// <returns>The ID of the newly created content attachment record.</returns>
/// <exception cref="InvalidOperationException">Thrown if validation fails or database insertion fails.</exception>
public static async Task<long> AddMetadataItemContent(long metadataId, ContentModel contentModel, Authentication.ApplicationUser? user = null)
{
    // get metadata map to determine the platform this content belongs to
    var metadataMap = await MetadataManagement.GetMetadataMap(metadataId);
    if (metadataMap == null)
    {
        throw new InvalidOperationException($"Metadata map not found for id {metadataId}.");
    }
    var platformId = metadataMap.PlatformId;

    if (user == null)
    {
        throw new InvalidOperationException("User must be provided.");
    }

    // resolve the caller's roles for the permission checks below
    var userStore = new Authentication.UserStore();
    List<string> userRoles = (await userStore.GetRolesAsync(user, new CancellationToken())).ToList();
    string userId = user.Id;

    // validate content type is allowed for this platform / role set
    if (!IsContentTypeUploadable(contentModel.ContentType, platformId, userRoles))
    {
        throw new InvalidOperationException($"Content type {contentModel.ContentType} is not allowed for the given platform or user roles.");
    }

    // validate (and possibly transcode) the content; ValidateContent may rewrite
    // both the type and the bytes (e.g. animated GIF -> MP4 video)
    ContentType contentType = contentModel.ContentType;
    byte[] contentData = contentModel.ByteArray;
    try
    {
        ValidateContent(ref contentType, ref contentData);
    }
    catch (Exception ex)
    {
        throw new InvalidOperationException("Content validation failed: " + ex.Message, ex);
    }

    var contentTypeConfig = ContentConfigurations[contentType];
    if (!contentTypeConfig.IsUserManaged)
    {
        // system managed content may only be uploaded by admins,
        // and is owned by the System user rather than the uploader
        if (!userRoles.Contains(RoleAdmin))
        {
            throw new InvalidOperationException("Only Admin users can upload system managed content.");
        }
        userId = SystemUserId;
    }

    // save file to disk under <library>/<user|Global>/<typename>s
    string userDirectory = userId == SystemUserId ? "Global" : userId;
    string contentDir = contentType == ContentType.GlobalManual ? "manuals" : contentType.ToString().ToLower() + "s";
    string dirPath = Path.Combine(Config.LibraryConfiguration.LibraryContentDirectory, userDirectory, contentDir);
    if (!Directory.Exists(dirPath))
    {
        Directory.CreateDirectory(dirPath);
    }
    string fileName = $"{Guid.NewGuid()}"; // use a GUID for the filename to avoid collisions
    string filePath = Path.Combine(dirPath, fileName);
    await System.IO.File.WriteAllBytesAsync(filePath, contentData);

    try
    {
        // compute SHA1 hash of the stored bytes
        using var sha1 = System.Security.Cryptography.SHA1.Create();
        byte[] fileHashBytes = sha1.ComputeHash(contentData);
        string fileHash = BitConverter.ToString(fileHashBytes).Replace("-", "").ToLowerInvariant();

        // Save content metadata to database, associating with metadataId and the owner
        Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
        string sql = "INSERT INTO MetadataMap_Attachments (MetadataMapID, AttachmentType, UserId, SHA1, Filename, FileSystemFilename, Size) VALUES (@MetadataMapID, @AttachmentType, @UserId, @SHA1, @filename, @filesystemfilename, @size); SELECT LAST_INSERT_ID();";
        var parameters = new Dictionary<string, object>
        {
            { "@MetadataMapID", metadataId },
            { "@AttachmentType", (int)contentType },
            { "@UserId", userId },
            { "@SHA1", fileHash },
            { "@filename", contentModel.Filename },
            { "@filesystemfilename", System.IO.Path.GetFileName(filePath) },
            { "@size", new System.IO.FileInfo(filePath).Length }
        };
        var result = await db.ExecuteCMDAsync(sql, parameters);
        if (result.Rows.Count == 0)
        {
            throw new InvalidOperationException("Failed to insert content metadata into the database.");
        }

        long newId = Convert.ToInt64(result.Rows[0][0]);
        RefreshNotificationSignal.MarkContentChanged();
        return newId;
    }
    catch
    {
        // don't leave an orphaned file on disk if the database record could not be created
        try { System.IO.File.Delete(filePath); } catch { /* best-effort cleanup */ }
        throw;
    }
}
|
||||
// Dispatches to the type-specific validator. Image and video validators may
// transcode the payload, which is why both the type and data are by-reference.
private static void ValidateContent(ref ContentType contentType, ref byte[] contentData)
{
    switch (contentType)
    {
        case ContentType.Screenshot:
        case ContentType.Photo:
            // may rewrite contentType to Video (animated GIF conversion)
            ValidateImageContent(ref contentType, ref contentData);
            return;

        case ContentType.Video:
            ValidateVideoContent(ref contentData);
            return;

        case ContentType.Note:
            ValidateNoteContent(ref contentData);
            return;

        case ContentType.GlobalManual:
            ValidatePdfContent(contentData);
            return;

        case ContentType.AudioSample:
            ValidateAudioSample(contentData);
            return;

        default:
            throw new InvalidOperationException("Unsupported content type.");
    }
}
|
||||
|
||||
// Validates image content. Animated GIFs are converted to MP4 video (updating
// contentType); everything else is normalised to PNG. Any failure is surfaced
// as InvalidOperationException("Invalid image data.").
private static void ValidateImageContent(ref ContentType contentType, ref byte[] contentData)
{
    try
    {
        // GIF87a / GIF89a magic header check
        bool isGif = contentData.Length >= 6 &&
            contentData[0] == 'G' && contentData[1] == 'I' && contentData[2] == 'F' &&
            contentData[3] == '8' &&
            (contentData[4] == '7' || contentData[4] == '9') &&
            contentData[5] == 'a';
        bool converted = false;
        if (isGif)
        {
            using var gifStream = new MemoryStream(contentData);
            // using declaration guarantees disposal even when Read() or the
            // conversion below throws (the original only disposed on success)
            using var collection = new ImageMagick.MagickImageCollection();
            collection.Read(gifStream);
            if (collection.Count > 1)
            {
                try
                {
                    // Collect frame delays (in 1/100s of a second units per GIF spec)
                    var delays = collection.Select(f => (int)f.AnimationDelay).Where(d => d > 0).ToList();
                    double avgDelay = delays.Any() ? delays.Average() : 10d; // default 10 (i.e. 100ms) if missing
                    double derivedFps = 100.0 / avgDelay; // hundredths-of-a-second delay -> frames per second
                    if (derivedFps < 5) derivedFps = 5;
                    if (derivedFps > 60) derivedFps = 60;

                    // Prefer an integer fps for ffmpeg stability
                    int targetFps = (int)Math.Round(derivedFps);
                    if (targetFps < 5) targetFps = 5;
                    if (targetFps > 60) targetFps = 60;

                    // Execute ffmpeg conversion
                    byte[]? mp4Bytes = ConvertGifToMp4WithFfmpeg(contentData, targetFps);
                    if (mp4Bytes != null && mp4Bytes.Length > 0)
                    {
                        contentData = mp4Bytes;
                        contentType = ContentType.Video;
                        converted = true;
                    }
                    else
                    {
                        Logging.LogKey(Logging.LogType.Warning, "process.content_manager", "contentmanager.animated_gif_conversion_empty_output_fallback_png");
                    }
                }
                catch (Exception ex)
                {
                    // conversion failure is non-fatal: fall through to the PNG path
                    Logging.LogKey(Logging.LogType.Warning, "process.content_manager", "contentmanager.animated_gif_conversion_exception_fallback_png", null, new string[] { ex.Message });
                }
            }
        }
        if (!converted)
        {
            // Normalise any still image (or unconverted GIF) to PNG
            using var inputStream = new MemoryStream(contentData);
            using var image = new ImageMagick.MagickImage(inputStream);
            image.Format = ImageMagick.MagickFormat.Png;
            using var ms = new MemoryStream();
            image.Write(ms);
            contentData = ms.ToArray();
        }
    }
    catch (Exception ex)
    {
        throw new InvalidOperationException("Invalid image data.", ex);
    }
}
|
||||
|
||||
// Validates video content. MP4 data (detected by the 'ftyp' box at offset 4)
// passes through untouched; anything else is decoded with ImageMagick and
// re-encoded as MP4. Failures surface as InvalidOperationException.
private static void ValidateVideoContent(ref byte[] contentData)
{
    try
    {
        // MP4 container check: 'ftyp' box signature at byte offset 4
        bool isMp4 = contentData.Length > 12 &&
            contentData[4] == (byte)'f' &&
            contentData[5] == (byte)'t' &&
            contentData[6] == (byte)'y' &&
            contentData[7] == (byte)'p';
        if (!isMp4)
        {
            using var inputStream = new MemoryStream(contentData);
            ImageMagick.MagickImageCollection? collection = null;
            try
            {
                collection = new ImageMagick.MagickImageCollection();
                collection.Read(inputStream);
                if (collection.Count == 0)
                    throw new InvalidOperationException("No frames detected in video source.");
                collection.Coalesce();

                // Pad to even dimensions (required by common MP4/H.264 encoders)
                int width = (int)collection[0].Width;
                int height = (int)collection[0].Height;
                int newWidth = (width % 2 == 0) ? width : width + 1;
                int newHeight = (height % 2 == 0) ? height : height + 1;
                if (newWidth != width || newHeight != height)
                {
                    var normalized = new ImageMagick.MagickImageCollection();
                    foreach (var frame in collection)
                    {
                        var canvas = new ImageMagick.MagickImage(ImageMagick.MagickColors.Transparent, (uint)newWidth, (uint)newHeight);
                        canvas.Composite(frame, 0, 0);
                        canvas.AnimationDelay = frame.AnimationDelay;
                        normalized.Add(canvas);
                    }
                    collection.Dispose();
                    collection = normalized; // ownership transfers to the local
                }
                using var outputStream = new MemoryStream();
                collection.Write(outputStream, ImageMagick.MagickFormat.Mp4);
                contentData = outputStream.ToArray();
            }
            finally
            {
                // finally guarantees disposal on every path (the original leaked
                // the collection whenever an exception was thrown mid-way)
                collection?.Dispose();
            }
        }
    }
    catch (Exception ex)
    {
        throw new InvalidOperationException("Invalid or unsupported video data.", ex);
    }
}
|
||||
|
||||
// Validates note content as well-formed UTF-8 text and re-encodes it canonically.
// The original used Encoding.UTF8.GetString, which never throws on malformed
// input (invalid sequences are silently replaced with U+FFFD), so the validation
// could never actually fail. A strict decoder fixes that.
private static void ValidateNoteContent(ref byte[] contentData)
{
    try
    {
        // throwOnInvalidBytes: true makes malformed UTF-8 raise DecoderFallbackException
        var strictUtf8 = new System.Text.UTF8Encoding(encoderShouldEmitUTF8Identifier: false, throwOnInvalidBytes: true);
        var text = strictUtf8.GetString(contentData);
        // re-encode so the stored bytes are canonical UTF-8
        contentData = System.Text.Encoding.UTF8.GetBytes(text);
    }
    catch (Exception ex)
    {
        throw new InvalidOperationException("Invalid text data.", ex);
    }
}
|
||||
|
||||
// Validates that the payload begins with the ASCII PDF signature "%PDF-".
private static void ValidatePdfContent(byte[] contentData)
{
    const string signature = "%PDF-";
    bool valid = contentData.Length >= signature.Length;
    if (valid)
    {
        for (int i = 0; i < signature.Length; i++)
        {
            if (contentData[i] != (byte)signature[i])
            {
                valid = false;
                break;
            }
        }
    }
    if (!valid)
    {
        throw new InvalidOperationException("Invalid PDF data.");
    }
}
|
||||
|
||||
// Validates that the payload carries the ZIP local-file-header magic "PK\x03\x04"
// (audio samples are delivered as ZIP archives).
private static void ValidateAudioSample(byte[] contentData)
{
    bool hasZipMagic =
        contentData.Length >= 4 &&
        contentData[0] == (byte)'P' &&
        contentData[1] == (byte)'K' &&
        contentData[2] == 0x03 &&
        contentData[3] == 0x04;

    if (!hasZipMagic)
    {
        throw new InvalidOperationException("Invalid ZIP data.");
    }
}
|
||||
|
||||
/// <summary>
/// Retrieves a page of content attachments associated with the specified metadata item IDs
/// that the user has access to (their own content plus System-owned content).
/// </summary>
/// <param name="metadataIds">List of metadata item IDs to retrieve content for.</param>
/// <param name="user">The user requesting the content; used for access control.</param>
/// <param name="contentTypes">List of content types to filter by. Must not be null; an empty list returns an empty result. (The original doc claimed null meant "all types", but null has always been rejected.)</param>
/// <param name="page">The page number for pagination (1-based).</param>
/// <param name="pageSize">The number of items per page for pagination.</param>
/// <returns>ContentViewModel with the accessible attachments and pagination totals.</returns>
/// <exception cref="ArgumentException">Thrown if parameters are invalid.</exception>
public static async Task<ContentViewModel> GetMetadataItemContents(List<long> metadataIds, Authentication.ApplicationUser user, List<ContentType>? contentTypes, int page = 1, int pageSize = 50)
{
    if (metadataIds == null || metadataIds.Count == 0)
    {
        throw new ArgumentException("metadataIds cannot be null or empty");
    }

    if (contentTypes == null)
    {
        throw new ArgumentException("contentTypes cannot be null");
    }

    ContentViewModel contentViewModel = new ContentViewModel
    {
        Items = new List<ContentViewModel.ContentViewItemModel>(),
        TotalCount = 0,
        Page = page,
        PageSize = pageSize
    };

    if (contentTypes.Count == 0)
    {
        // no content types requested - nothing to return
        return contentViewModel;
    }

    if (user == null)
    {
        throw new ArgumentException("user cannot be null");
    }

    // Build the IN lists once for both queries. They are composed from longs and
    // enum ints - never raw user strings - so concatenation here is injection-safe.
    string idList = string.Join(",", metadataIds);
    string typeList = string.Join(",", contentTypes.Select(ct => (int)ct));

    // One connection handle serves both the count and the page query
    // (the original created two separate Database instances).
    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);

    // total row count for pagination
    string countSql = "SELECT COUNT(*) FROM MetadataMap_Attachments WHERE MetadataMapID IN (" + idList + ") AND AttachmentType IN (" + typeList + ") AND (UserId = @userid OR UserId = @systemuserid);";
    var countParameters = new Dictionary<string, object>
    {
        { "@userid", user.Id },
        { "@systemuserid", "System" }
    };
    var countResult = await db.ExecuteCMDAsync(countSql, countParameters);
    if (countResult.Rows.Count > 0)
    {
        contentViewModel.TotalCount = Convert.ToInt32(countResult.Rows[0][0]);
    }

    // fetch the requested page, newest first
    string sql = "SELECT * FROM MetadataMap_Attachments WHERE MetadataMapID IN (" + idList + ") AND AttachmentType IN (" + typeList + ") AND (UserId = @userid OR UserId = @systemuserid) ORDER BY DateCreated DESC LIMIT @offset, @pagesize;";
    var parameters = new Dictionary<string, object>
    {
        { "@userid", user.Id },
        { "@systemuserid", "System" },
        { "@offset", (page - 1) * pageSize },
        { "@pagesize", pageSize }
    };
    var result = await db.ExecuteCMDAsync(sql, parameters);

    List<ContentViewModel.ContentViewItemModel> contents = new List<ContentViewModel.ContentViewItemModel>();
    foreach (DataRow row in result.Rows)
    {
        contents.Add(await BuildContentView(row));
    }
    contentViewModel.Items = contents;

    return contentViewModel;
}
|
||||
|
||||
/// <summary>
/// Retrieves a specific content attachment by its ID if the user has access to it.
/// </summary>
/// <param name="attachmentId">The ID of the content attachment to retrieve.</param>
/// <param name="user">The user requesting the content; used for access control.</param>
/// <returns>The ContentViewModel representing the content attachment.</returns>
/// <exception cref="InvalidOperationException">Thrown if the attachment is not found or access is denied.</exception>
public static async Task<ContentViewModel.ContentViewItemModel> GetMetadataItemContent(long attachmentId, Authentication.ApplicationUser user)
{
    // Access is granted when the caller owns the attachment, the attachment
    // belongs to the System user, or it is flagged as shared.
    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
    string sql = $"SELECT * FROM MetadataMap_Attachments WHERE AttachmentID = {AttachmentIdParam} AND ((UserId = @userid OR UserId = @systemuserid) OR IsShared = @isshared);";
    var queryParameters = new Dictionary<string, object>
    {
        { AttachmentIdParam, attachmentId },
        { "@userid", user.Id },
        { "@systemuserid", "System" },
        { "@isshared", true }
    };

    var queryResult = await db.ExecuteCMDAsync(sql, queryParameters);
    if (queryResult.Rows.Count == 0)
    {
        throw new InvalidOperationException("Attachment not found or access denied.");
    }

    return await BuildContentView(queryResult.Rows[0]);
}
|
||||
|
||||
/// <summary>
/// Retrieves the binary data of a specific content attachment by its ID if the user has access to it.
/// </summary>
/// <param name="attachmentId">The ID of the content attachment to retrieve.</param>
/// <param name="user">The user requesting the content; used for access control.</param>
/// <returns>The binary data of the content attachment.</returns>
/// <exception cref="InvalidOperationException">Thrown if the attachment is not found, access is denied, or the file is missing.</exception>
public static async Task<byte[]> GetMetadataItemContentData(long attachmentId, Authentication.ApplicationUser user)
{
    // Reuse the metadata lookup for the access-control check; it throws when the
    // attachment does not exist or the caller may not see it.
    var contentView = await GetMetadataItemContent(attachmentId, user);
    if (contentView == null)
    {
        throw new InvalidOperationException("Attachment not found or access denied.");
    }

    // Rebuild the on-disk location: <library>/<owner|Global>/<typename>s/<file>
    string ownerDirectory = contentView.UploadedByUserId == "System" ? "Global" : contentView.UploadedByUserId;
    string typeDirectory = contentView.ContentType == ContentType.GlobalManual
        ? "manuals"
        : contentView.ContentType.ToString().ToLower() + "s";
    string fullPath = Path.Combine(Config.LibraryConfiguration.LibraryContentDirectory, ownerDirectory, typeDirectory, contentView.FileSystemFilename);

    if (!System.IO.File.Exists(fullPath))
    {
        throw new InvalidOperationException("Attachment file not found on disk.");
    }

    return await System.IO.File.ReadAllBytesAsync(fullPath);
}
|
||||
|
||||
// Materialises one MetadataMap_Attachments row into a view-model item.
// Reads columns: MetadataMapID, AttachmentID, Filename, FileSystemFilename,
// AttachmentType, Size, DateCreated, UserId, IsShared.
// IMPORTANT: GetMetadataMap may return a cached instance; this method copies it
// rather than trimming it in place - do not "simplify" that copy away.
private async static Task<ContentViewModel.ContentViewItemModel> BuildContentView(DataRow row)
{
    var contentView = new ContentViewModel.ContentViewItemModel
    {
        MetadataId = Convert.ToInt64(row["MetadataMapID"]),
        Metadata = await MetadataManagement.GetMetadataMap(Convert.ToInt64(row["MetadataMapID"])),
        AttachmentId = Convert.ToInt64(row["AttachmentID"]),
        FileName = Convert.ToString(row["Filename"]) ?? "",
        FileSystemFilename = Convert.ToString(row["FileSystemFilename"]) ?? "",
        ContentType = (ContentType)Convert.ToInt32(row["AttachmentType"]),
        Size = Convert.ToInt64(row["Size"]),
        UploadedAt = Convert.ToDateTime(row["DateCreated"]),
        UploadedByUserId = Convert.ToString(row["UserId"]) ?? "",
        IsShared = Convert.ToBoolean(row["IsShared"])
    };

    // remove unwanted heavy collection data from Metadata WITHOUT mutating the cached instance
    if (contentView.Metadata != null)
    {
        // Create a lightweight shallow copy so the cached MetadataMap remains intact
        var original = contentView.Metadata;
        contentView.Metadata = new MetadataMap
        {
            Id = original.Id,
            PlatformId = original.PlatformId,
            SignatureGameName = original.SignatureGameName,
            // Intentionally omit MetadataMapItems (set to null) for this trimmed view
            MetadataMapItems = null
        };
    }

    // remove any file extensions from the FileName (the frontend uses it as a title)
    contentView.FileName = System.IO.Path.GetFileNameWithoutExtension(contentView.FileName);

    // get uploader profile - if UserId is "System", set to null
    string userId = Convert.ToString(row["UserId"]) ?? "";
    if (userId == "System")
    {
        contentView.UploadedBy = null;
    }
    else
    {
        // get the user account from the userId
        var userStore = new Authentication.UserStore();
        var user = await userStore.FindByIdAsync(userId, CancellationToken.None);
        if (user == null)
        {
            // uploader account no longer exists - leave the profile empty rather than failing
            contentView.UploadedBy = null;
        }
        else
        {
            var userProfile = new UserProfile();
            contentView.UploadedBy = await userProfile.GetUserProfile(user.ProfileId.ToString());
        }
    }

    return contentView;
}
|
||||
|
||||
/// <summary>
/// Deletes a specific content attachment by its ID if the user has permission to delete it.
/// Users may delete their own content; Admins may additionally delete System-owned content.
/// </summary>
/// <param name="attachmentId">The ID of the content attachment to delete.</param>
/// <param name="user">The user requesting the deletion; used for permission checks.</param>
/// <returns>A task representing the asynchronous operation.</returns>
/// <exception cref="InvalidOperationException">Thrown if the attachment is not found or the user lacks permission to delete it.</exception>
public static async Task DeleteMetadataItemContent(long attachmentId, Authentication.ApplicationUser user)
{
    // throws if the attachment doesn't exist or the user can't see it
    var existingContent = await GetMetadataItemContent(attachmentId, user);
    if (existingContent == null)
    {
        throw new InvalidOperationException("Attachment not found or access denied.");
    }

    var userStore = new Authentication.UserStore();
    var userRoles = (await userStore.GetRolesAsync(user, new CancellationToken())).ToList();

    // Permission model (same reachable behaviour as the original branch chain,
    // minus its unreachable "system content" branch and dead allowDelete flag):
    //   - owners may delete their own content
    //   - Admins may delete System-owned content
    //   - everything else is denied (including Admins deleting other users' content)
    bool isOwner = existingContent.UploadedByUserId == user.Id;
    bool isAdminDeletingSystemContent = existingContent.UploadedByUserId == SystemUserId && userRoles.Contains(RoleAdmin);
    if (!isOwner && !isAdminDeletingSystemContent)
    {
        throw new InvalidOperationException("You do not have permission to delete this content.");
    }

    // delete the file from disk: <library>/<owner|Global>/<typename>s/<file>
    string userDirectory = existingContent.UploadedByUserId == SystemUserId ? "Global" : existingContent.UploadedByUserId;
    string contentDir = existingContent.ContentType == ContentType.GlobalManual ? "manuals" : existingContent.ContentType.ToString().ToLower() + "s";
    string dirPath = Path.Combine(Config.LibraryConfiguration.LibraryContentDirectory, userDirectory, contentDir);
    string filePath = Path.Combine(dirPath, existingContent.FileSystemFilename);
    if (System.IO.File.Exists(filePath))
    {
        System.IO.File.Delete(filePath);
    }
    else
    {
        // file already missing - log and continue so the orphaned DB record is still removed
        Logging.LogKey(Logging.LogType.Warning, "process.content_manager", "contentmanager.file_not_found_attempting_delete_attachment", null, new string[] { filePath, attachmentId.ToString() });
    }

    // delete the database record
    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
    string sql = $"DELETE FROM MetadataMap_Attachments WHERE AttachmentID = {AttachmentIdParam};";
    var parameters = new Dictionary<string, object>
    {
        { AttachmentIdParam, attachmentId }
    };
    await db.ExecuteCMDAsync(sql, parameters);
    RefreshNotificationSignal.MarkContentChanged();
}
|
||||
|
||||
/// <summary>
/// Updates properties of a specific content attachment if the user has permission to update it.
/// </summary>
/// <param name="attachmentId">The ID of the content attachment to update.</param>
/// <param name="user">The user requesting the update; used for permission checks.</param>
/// <param name="isShared">Optional new value for the IsShared property; can only be modified for shareable content types.</param>
/// <param name="content">Optional new content: Note type updates a file on disk; Screenshot, Photo, Video, and GlobalManual types update the filename field, which is used as the content title in the frontend.</param>
/// <returns>The updated ContentViewModel.ContentViewItemModel representing the content attachment.</returns>
/// <exception cref="InvalidOperationException">Thrown if the attachment is not found, the user lacks permission to update it, or if invalid updates are attempted.</exception>
public static async Task<ContentViewModel.ContentViewItemModel> UpdateMetadataItem(long attachmentId, Authentication.ApplicationUser user, bool? isShared = null, string? content = null)
{
    // users can only update their own content
    // isShared can be modified for any user-owned content if the content type is shareable
    // content behaves differently based on content type
    // - Note content updates the note file on disk
    // - Screenshot, Photo, Video, and GlobalManual content updates the filename field, which is used as the content title in the frontend
    // NOTE(review): the isShared update and the content update below run as two
    // independent statements (no transaction) - a failure between them leaves a
    // partial update.

    // get existing content; this also enforces read access for this user
    var existingContent = await GetMetadataItemContent(attachmentId, user);
    if (existingContent == null)
    {
        throw new InvalidOperationException("Attachment not found or access denied.");
    }

    // only the owner may update - admins cannot update other users' content here
    if (existingContent.UploadedByUserId != user.Id)
    {
        throw new InvalidOperationException("You do not have permission to update this content.");
    }

    // tracks whether anything actually changed, so the refresh signal fires at most once
    bool contentChanged = false;

    if (isShared.HasValue)
    {
        // check if content type is shareable (per the static content configuration table)
        if (!_contentConfigurations.TryGetValue(existingContent.ContentType, out var config) || !config.IsShareable)
        {
            throw new InvalidOperationException("This content type cannot be shared.");
        }

        // update isShared (parameterised query)
        Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
        string sql = $"UPDATE MetadataMap_Attachments SET IsShared = @isshared WHERE AttachmentID = {AttachmentIdParam};";
        var parameters = new Dictionary<string, object>
        {
            { "@isshared", isShared.Value },
            { AttachmentIdParam, attachmentId }
        };
        await db.ExecuteCMDAsync(sql, parameters);

        // keep the in-memory model in sync with the database
        existingContent.IsShared = isShared.Value;
        contentChanged = true;
    }

    // NOTE(review): IsNullOrEmpty means an empty string is treated as "no update" -
    // callers cannot clear a title or note this way; presumably intentional, confirm.
    if (!string.IsNullOrEmpty(content))
    {
        // update content based on type
        switch (existingContent.ContentType)
        {
            case ContentType.Screenshot:
            case ContentType.Photo:
            case ContentType.Video:
            case ContentType.GlobalManual:
                // update the filename field in the database (used as the display title, not the on-disk name)
                Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
                string sql = $"UPDATE MetadataMap_Attachments SET Filename = @filename WHERE AttachmentID = {AttachmentIdParam};";
                var parameters = new Dictionary<string, object>
                {
                    { "@filename", content },
                    { AttachmentIdParam, attachmentId }
                };
                await db.ExecuteCMDAsync(sql, parameters);

                existingContent.FileName = content;
                contentChanged = true;
                break;

            case ContentType.Note:
                // update the note content by overwriting its file on disk
                // NOTE(review): this path is built from FileName, while deletion uses
                // FileSystemFilename - confirm the two match for Note content.
                string userDirectory = existingContent.UploadedByUserId == "System" ? "Global" : existingContent.UploadedByUserId;
                string contentDir = existingContent.ContentType.ToString().ToLower() + "s";
                string dirPath = Path.Combine(Config.LibraryConfiguration.LibraryContentDirectory, userDirectory, contentDir);
                string filePath = Path.Combine(dirPath, existingContent.FileName);
                await System.IO.File.WriteAllTextAsync(filePath, content);
                contentChanged = true;
                break;
            default:
                throw new InvalidOperationException("This content type cannot be updated.");
        }
    }

    // notify listeners exactly once if anything changed
    if (contentChanged)
    {
        RefreshNotificationSignal.MarkContentChanged();
    }

    return existingContent;
}
|
||||
|
||||
/// <summary>
/// Convert an animated GIF (byte array) to MP4 using an installed ffmpeg binary. Returns null if conversion fails.
/// </summary>
/// <param name="gifBytes">Source GIF bytes.</param>
/// <param name="fps">Target frames per second (clamped to 5-60).</param>
/// <returns>MP4 bytes or null on failure.</returns>
private static byte[]? ConvertGifToMp4WithFfmpeg(byte[] gifBytes, int fps)
{
    string inputPath = "";
    string outputPath = "";
    try
    {
        // Ensure fps bounds
        if (fps < 5) fps = 5;
        if (fps > 60) fps = 60;

        // Check for ffmpeg availability
        string ffmpegPath = "ffmpeg"; // rely on PATH
        try
        {
            // 'using' disposes the probe process (the original leaked it)
            using var check = Process.Start(new ProcessStartInfo
            {
                FileName = ffmpegPath,
                Arguments = "-version",
                RedirectStandardOutput = true,
                RedirectStandardError = true,
                UseShellExecute = false,
                CreateNoWindow = true
            });
            if (check == null)
                return null;
            // Drain the redirected pipes so the probe cannot block on a full buffer
            check.StandardOutput.ReadToEnd();
            check.StandardError.ReadToEnd();
            if (!check.WaitForExit(3000))
            {
                // Probe hung - kill it rather than leaving an orphan process
                // (reading ExitCode after a timed-out WaitForExit would throw).
                try { check.Kill(); } catch { }
                return null;
            }
            if (check.ExitCode != 0)
                return null;
        }
        catch
        {
            // ffmpeg not available
            return null;
        }

        string tempDir = Path.Combine(Path.GetTempPath(), "gaseous_gifconv");
        Directory.CreateDirectory(tempDir);
        inputPath = Path.Combine(tempDir, Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture) + ".gif");
        outputPath = Path.Combine(tempDir, Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture) + ".mp4");
        File.WriteAllBytes(inputPath, gifBytes);

        // Build ffmpeg arguments:
        // -y overwrite
        // -i input.gif
        // -vf scale filter to ensure even dimensions and chosen fps
        // -movflags +faststart for streaming friendliness
        // -pix_fmt yuv420p for compatibility
        string args = $"-hide_banner -loglevel error -y -i \"{inputPath}\" -vf \"scale=trunc(iw/2)*2:trunc(ih/2)*2,fps={fps}\" -movflags +faststart -pix_fmt yuv420p -an \"{outputPath}\"";

        var psi = new ProcessStartInfo
        {
            FileName = ffmpegPath,
            Arguments = args,
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            UseShellExecute = false,
            CreateNoWindow = true
        };
        using var proc = Process.Start(psi);
        if (proc == null)
            return null;

        // Read BOTH pipes asynchronously: the original blocked on stderr only,
        // which deadlocks if ffmpeg fills the (unread) stdout pipe buffer.
        var stdErrTask = proc.StandardError.ReadToEndAsync();
        var stdOutTask = proc.StandardOutput.ReadToEndAsync();

        if (!proc.WaitForExit(30000)) // 30s timeout for safety
        {
            // Conversion hung - kill ffmpeg instead of leaving it running
            try { proc.Kill(); } catch { }
            Logging.LogKey(Logging.LogType.Warning, "process.content_manager", "contentmanager.ffmpeg_gif_to_mp4_conversion_failed", null, new string[] { "timeout", stdErrTask.IsCompleted ? stdErrTask.Result : "" });
            return null;
        }

        string stdErr = stdErrTask.Result;
        if (proc.ExitCode != 0 || !File.Exists(outputPath))
        {
            Logging.LogKey(Logging.LogType.Warning, "process.content_manager", "contentmanager.ffmpeg_gif_to_mp4_conversion_failed", null, new string[] { proc.ExitCode.ToString(), stdErr });
            return null;
        }

        byte[] mp4 = File.ReadAllBytes(outputPath);
        return mp4;
    }
    catch (Exception ex)
    {
        Logging.LogKey(Logging.LogType.Warning, "process.content_manager", "contentmanager.ffmpeg_gif_to_mp4_conversion_exception", null, new string[] { ex.Message });
        return null;
    }
    finally
    {
        // Cleanup temp files on every exit path, not just the success path (best effort)
        if (inputPath.Length > 0) { try { File.Delete(inputPath); } catch { } }
        if (outputPath.Length > 0) { try { File.Delete(outputPath); } catch { } }
    }
}
|
||||
}
|
||||
}
|
||||
947
gaseous-lib/Classes/Database/Database.cs
Normal file
947
gaseous-lib/Classes/Database/Database.cs
Normal file
|
|
@ -0,0 +1,947 @@
|
|||
|
||||
using System.Data;
|
||||
using System.Data.SqlClient;
|
||||
using System.Diagnostics;
|
||||
using System.Linq.Expressions;
|
||||
using System.Reflection;
|
||||
using MySqlConnector;
|
||||
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
/// <summary>
|
||||
/// Provides methods for interacting with the database, including schema management, command execution, and transactions.
|
||||
/// </summary>
|
||||
public class Database
|
||||
{
|
||||
// Backing store for the database schema version; 0 until read from the database.
private static int _schema_version { get; set; } = 0;

/// <summary>
/// Gets or sets the current schema version of the database.
/// </summary>
public static int schema_version
{
    get => _schema_version;
    set => _schema_version = value;
}
|
||||
|
||||
/// <summary>
/// Initializes a new instance of the <see cref="Database"/> class with no
/// connector type or connection string configured.
/// </summary>
public Database() { }

/// <summary>
/// Initializes a new instance of the <see cref="Database"/> class for the given
/// connector type and connection string.
/// </summary>
/// <param name="Type">The type of database connector to use.</param>
/// <param name="ConnectionString">The connection string to use for the database connection.</param>
public Database(databaseType Type, string ConnectionString)
{
    // the two assignments are independent; order is irrelevant
    _ConnectionString = ConnectionString;
    _ConnectorType = Type;
}
|
||||
|
||||
/// <summary>
/// Specifies the type of database connector being used.
/// </summary>
public enum databaseType
{
    /// <summary>
    /// MySQL database.
    /// </summary>
    MySql
}
|
||||
|
||||
// Backing field for ConnectionString; empty until configured.
string _ConnectionString = "";

/// <summary>
/// Gets or sets the connection string used to connect to the database.
/// </summary>
public string ConnectionString
{
    get => _ConnectionString;
    set => _ConnectionString = value;
}
|
||||
|
||||
// Backing field for ConnectorType; null until configured.
databaseType? _ConnectorType = null;

/// <summary>
/// Gets or sets the type of database connector being used.
/// </summary>
public databaseType? ConnectorType
{
    get => _ConnectorType;
    set => _ConnectorType = value;
}

// Shared in-memory cache for query results, consulted by _ExecuteCMD when a
// DatabaseMemoryCacheOptions with caching enabled is supplied.
private static MemoryCache DatabaseMemoryCache = new MemoryCache();
|
||||
|
||||
/// <summary>
/// Initializes the database, creates schema version table if missing, and applies schema upgrades.
/// Takes a backup before the first migration step, retries timed-out steps with a larger timeout,
/// and logs restore instructions on failure before terminating.
/// </summary>
public async Task InitDB()
{
    // load resources
    var assembly = Assembly.GetExecutingAssembly();

    // caching is disabled for all schema work - migrations must always see live data
    DatabaseMemoryCacheOptions? CacheOptions = new DatabaseMemoryCacheOptions(false);

    Config.DatabaseConfiguration.UpgradeInProgress = true;

    switch (_ConnectorType)
    {
        case databaseType.MySql:
            // check if the database exists first - first run must have permissions to create a database
            // NOTE(review): DatabaseName comes from server configuration, not user input,
            // but it is concatenated into SQL here - confirm it is validated upstream.
            string sql = "CREATE DATABASE IF NOT EXISTS `" + Config.DatabaseConfiguration.DatabaseName + "`;";
            Dictionary<string, object> dbDict = new Dictionary<string, object>();
            Logging.LogKey(Logging.LogType.Information, "process.database", "database.creating_database_if_not_exists");
            // server-level connection string (no database name) since the database may not exist yet
            ExecuteCMD(sql, dbDict, CacheOptions, 30, "server=" + Config.DatabaseConfiguration.HostName + ";port=" + Config.DatabaseConfiguration.Port + ";userid=" + Config.DatabaseConfiguration.UserName + ";password=" + Config.DatabaseConfiguration.Password);

            // check if schema version table is in place - if not, create the schema version table
            sql = "SELECT TABLE_SCHEMA, TABLE_NAME FROM information_schema.TABLES WHERE TABLE_SCHEMA = '" + Config.DatabaseConfiguration.DatabaseName + "' AND TABLE_NAME = 'schema_version';";
            DataTable SchemaVersionPresent = ExecuteCMD(sql, dbDict, CacheOptions);
            if (SchemaVersionPresent.Rows.Count == 0)
            {
                // no schema table present - create it
                Logging.LogKey(Logging.LogType.Information, "process.database", "database.schema_version_table_missing_creating");
                sql = "CREATE TABLE `schema_version` (`schema_version` INT NOT NULL, PRIMARY KEY (`schema_version`)); INSERT INTO `schema_version` (`schema_version`) VALUES (0);";
                ExecuteCMD(sql, dbDict, CacheOptions);
            }

            // ensure migration journal table exists before any migration steps run
            MigrationJournal.EnsureTable();

            sql = "SELECT schema_version FROM schema_version;";
            dbDict = new Dictionary<string, object>();
            DataTable SchemaVersion = ExecuteCMD(sql, dbDict, CacheOptions);
            int OuterSchemaVer = (int)SchemaVersion.Rows[0][0];
            if (OuterSchemaVer == 0)
            {
                // fresh install: migration resources are numbered from 1000
                OuterSchemaVer = 1000;
            }

            // --- PREFLIGHT: scan for all migration resources that need to be applied ---
            // If a contiguous sequence exists and a resource is missing in that sequence,
            // fail early before touching any data. This prevents partial migrations caused
            // by a missing embedded SQL file making it to production.
            {
                string[] allResources = Assembly.GetExecutingAssembly().GetManifestResourceNames();
                int preflight = OuterSchemaVer;
                bool foundAny = false;
                // walk forward until the first missing version resource
                while (true)
                {
                    string rn = "gaseous_lib.Support.Database.MySQL.gaseous-" + preflight + ".sql";
                    if (!allResources.Contains(rn)) break;
                    foundAny = true;
                    preflight++;
                }
                // If we found at least one pending resource, verify version sequence is
                // contiguous up to the last one found.
                if (foundAny)
                {
                    // preflight now points to the first missing version after a run of
                    // present versions — nothing more to check, sequence is intact.
                    Logging.LogKey(Logging.LogType.Information, "process.database",
                        "database.preflight_migration_resources_verified",
                        null, new[] { OuterSchemaVer.ToString(), (preflight - 1).ToString() });
                }
            }

            // --- BACKUP: take a backup before the first migration is applied ---
            // A failed backup aborts the whole process: migrating without a restore
            // point is considered unsafe.
            bool backupTaken = false;
            string backupFilePath = "";
            {
                string[] allResources = Assembly.GetExecutingAssembly().GetManifestResourceNames();
                bool hasPendingMigration = allResources.Contains(
                    "gaseous_lib.Support.Database.MySQL.gaseous-" + OuterSchemaVer + ".sql");

                if (hasPendingMigration)
                {
                    try
                    {
                        backupFilePath = DatabaseBackup.GenerateBackupPath();
                        DatabaseBackup.Backup(backupFilePath);
                        backupTaken = true;
                    }
                    catch (Exception backupEx)
                    {
                        Logging.LogKey(Logging.LogType.Critical, "process.database",
                            "database.backup_failed_aborting_migration",
                            null, new[] { backupEx.Message }, backupEx);
                        System.Environment.Exit(1);
                    }
                }
            }

            // Apply each pending migration in version order.
            // NOTE(review): when a version's resource is absent the loop body does
            // nothing but still iterates (and re-reads the manifest) up to 10000 -
            // it never breaks early on the first gap.
            for (int i = OuterSchemaVer; i < 10000; i++)
            {
                string resourceName = "gaseous_lib.Support.Database.MySQL.gaseous-" + i + ".sql";
                string dbScript = "";

                string[] resources = Assembly.GetExecutingAssembly().GetManifestResourceNames();
                if (resources.Contains(resourceName))
                {
                    using (Stream stream = assembly.GetManifestResourceStream(resourceName))
                    using (StreamReader reader = new StreamReader(stream))
                    {
                        dbScript = reader.ReadToEnd();

                        // apply script - re-read the stored version first so an already
                        // applied version is skipped
                        sql = "SELECT schema_version FROM schema_version;";
                        dbDict = new Dictionary<string, object>();
                        SchemaVersion = ExecuteCMD(sql, dbDict, CacheOptions);
                        if (SchemaVersion.Rows.Count == 0)
                        {
                            // something is broken here... where's the table?
                            Logging.LogKey(Logging.LogType.Critical, "process.database", "database.schema_table_missing_should_not_happen");
                            throw new Exception("schema_version table is missing!");
                        }
                        else
                        {
                            int SchemaVer = (int)SchemaVersion.Rows[0][0];
                            Logging.LogKey(Logging.LogType.Information, "process.database", "database.schema_version_is", null, new[] { SchemaVer.ToString() });
                            // update schema version variable
                            Database.schema_version = SchemaVer;
                            if (SchemaVer < i)
                            {
                                // Step timeouts: first attempt uses 360s, retry doubles up to 1440s (24 min)
                                int[] timeoutSequence = { 360, 720, 1440 };
                                bool stepSucceeded = false;
                                Exception? lastException = null;

                                // Each attempt runs the full step sequence:
                                // pre-upgrade code -> SQL script -> version bump -> post-upgrade code -> validation.
                                // Only timeout failures are retried; any other failure stops immediately.
                                foreach (int stepTimeout in timeoutSequence)
                                {
                                    try
                                    {
                                        long preJournalId = MigrationJournal.Start(i, MigrationJournal.StepType.PreUpgrade, "PreUpgradeScript");
                                        try
                                        {
                                            await DatabaseMigration.PreUpgradeScript(i, _ConnectorType);
                                            MigrationJournal.Complete(preJournalId);
                                        }
                                        catch (Exception preEx) when (preEx.Message.Contains("timeout", StringComparison.OrdinalIgnoreCase))
                                        {
                                            MigrationJournal.Fail(preJournalId, preEx.Message);
                                            throw; // let outer handler retry
                                        }
                                        catch (Exception preEx)
                                        {
                                            MigrationJournal.Fail(preJournalId, preEx.Message);
                                            throw;
                                        }

                                        long sqlJournalId = MigrationJournal.Start(i, MigrationJournal.StepType.SqlScript, resourceName);
                                        try
                                        {
                                            Logging.LogKey(Logging.LogType.Information, "process.database",
                                                "database.updating_schema_to_version",
                                                null, new[] { i.ToString(), stepTimeout.ToString() });
                                            await ExecuteCMDAsync(dbScript, dbDict, stepTimeout);
                                            MigrationJournal.Complete(sqlJournalId);
                                        }
                                        catch (Exception sqlEx) when (sqlEx.Message.Contains("timeout", StringComparison.OrdinalIgnoreCase))
                                        {
                                            MigrationJournal.Fail(sqlJournalId, sqlEx.Message);
                                            throw; // let outer handler retry
                                        }
                                        catch (Exception sqlEx)
                                        {
                                            MigrationJournal.Fail(sqlJournalId, sqlEx.Message);
                                            throw;
                                        }

                                        // increment schema version
                                        // NOTE(review): the parameter key lacks the '@' prefix while
                                        // the SQL uses @schemaver - presumably the parameter binding
                                        // normalises names; confirm against _ExecuteCMD.
                                        sql = "UPDATE schema_version SET schema_version=@schemaver";
                                        dbDict = new Dictionary<string, object>();
                                        dbDict.Add("schemaver", i);
                                        await ExecuteCMDAsync(sql, dbDict, CacheOptions);

                                        // run post-upgrade code (synchronous - not awaited)
                                        long postJournalId = MigrationJournal.Start(i, MigrationJournal.StepType.PostUpgrade, "PostUpgradeScript");
                                        try
                                        {
                                            DatabaseMigration.PostUpgradeScript(i, _ConnectorType);
                                            MigrationJournal.Complete(postJournalId);
                                        }
                                        catch (Exception postEx)
                                        {
                                            MigrationJournal.Fail(postJournalId, postEx.Message);
                                            throw;
                                        }

                                        // run validation checks for this schema version
                                        if (!DatabaseMigrationValidator.ValidateVersion(i))
                                        {
                                            throw new Exception($"Post-migration validation failed for schema version {i}. Check logs for details.");
                                        }

                                        // update schema version variable
                                        Database.schema_version = i;
                                        stepSucceeded = true;
                                        break; // no need to retry
                                    }
                                    catch (Exception ex) when (ex.Message.Contains("timeout", StringComparison.OrdinalIgnoreCase))
                                    {
                                        lastException = ex;
                                        if (stepTimeout == timeoutSequence[timeoutSequence.Length - 1])
                                        {
                                            // exhausted all retries
                                            Logging.LogKey(Logging.LogType.Critical, "process.database",
                                                "database.schema_upgrade_timed_out_all_retries",
                                                null, new[] { i.ToString(), stepTimeout.ToString() }, ex);
                                        }
                                        else
                                        {
                                            Logging.LogKey(Logging.LogType.Warning, "process.database",
                                                "database.schema_upgrade_timed_out_retrying",
                                                null, new[] { i.ToString(), stepTimeout.ToString() }, ex);
                                        }
                                    }
                                    catch (Exception ex)
                                    {
                                        lastException = ex;
                                        break; // non-timeout errors are not retried
                                    }
                                }

                                if (!stepSucceeded)
                                {
                                    // migration failed: log restore instructions (if a backup was
                                    // taken) and terminate the process - the schema is in an
                                    // unknown state and must not be served.
                                    string reason = lastException?.Message ?? "Unknown error";
                                    Logging.LogKey(Logging.LogType.Critical, "process.database",
                                        "database.schema_upgrade_failed_unable_to_continue",
                                        null, null, lastException);

                                    if (backupTaken)
                                    {
                                        DatabaseBackup.LogRestoreInstructions(backupFilePath, i, reason);
                                    }
                                    System.Environment.Exit(1);
                                }
                            }
                        }
                    }
                }
            }
            Logging.LogKey(Logging.LogType.Information, "process.database", "database.setup_complete");
            break;
    }
}
|
||||
|
||||
/// <summary>
/// Splits a SQL script into individual statements, ignoring semicolons inside
/// string literals, backtick-quoted identifiers, and comments.
/// </summary>
/// <param name="sqlScript">The full SQL script text.</param>
/// <returns>The individual trimmed, non-empty statements (without trailing semicolons).</returns>
private static List<string> SplitSqlStatements(string sqlScript)
{
    var statements = new List<string>();
    var sb = new System.Text.StringBuilder();
    bool inSingleQuote = false;
    bool inDoubleQuote = false;
    bool inBacktick = false;      // MySQL identifier quoting, e.g. `my;column`
    bool inLineComment = false;
    bool inBlockComment = false;

    for (int i = 0; i < sqlScript.Length; i++)
    {
        char c = sqlScript[i];
        char next = i < sqlScript.Length - 1 ? sqlScript[i + 1] : '\0';

        // Comments can only start outside of any quoted region
        if (!inSingleQuote && !inDoubleQuote && !inBacktick)
        {
            if (!inBlockComment && !inLineComment && c == '-' && next == '-')
            {
                // Per MySQL comment syntax, "--" only starts a comment when
                // followed by whitespace or end-of-input (so "a--b" is arithmetic).
                char afterNext = i < sqlScript.Length - 2 ? sqlScript[i + 2] : '\0';
                if (afterNext == '\0' || char.IsWhiteSpace(afterNext))
                {
                    inLineComment = true;
                }
            }
            else if (!inBlockComment && !inLineComment && c == '#')
            {
                // MySQL also supports "#" line comments
                inLineComment = true;
            }
            else if (!inBlockComment && c == '/' && next == '*')
            {
                inBlockComment = true;
                i++; // skip '*'
                continue;
            }
            else if (inBlockComment && c == '*' && next == '/')
            {
                inBlockComment = false;
                i++; // skip '/'
                continue;
            }
        }

        if (inLineComment)
        {
            if (c == '\n')
            {
                // keep the newline so statement text stays readable
                inLineComment = false;
                sb.Append(c);
            }
            continue;
        }
        if (inBlockComment)
        {
            continue;
        }

        // Backslash escapes inside string literals (default MySQL sql_mode):
        // copy the escaped character verbatim so \' and \" do not toggle quoting.
        if ((inSingleQuote || inDoubleQuote) && c == '\\' && next != '\0')
        {
            sb.Append(c);
            sb.Append(next);
            i++;
            continue;
        }

        // Handle entering/exiting quoted regions
        if (c == '\'' && !inDoubleQuote && !inBacktick)
        {
            inSingleQuote = !inSingleQuote;
        }
        else if (c == '"' && !inSingleQuote && !inBacktick)
        {
            inDoubleQuote = !inDoubleQuote;
        }
        else if (c == '`' && !inSingleQuote && !inDoubleQuote)
        {
            inBacktick = !inBacktick;
        }

        // Split on semicolon if not inside any quoted region
        if (c == ';' && !inSingleQuote && !inDoubleQuote && !inBacktick)
        {
            var statement = sb.ToString().Trim();
            if (!string.IsNullOrEmpty(statement))
                statements.Add(statement);
            sb.Clear();
        }
        else
        {
            sb.Append(c);
        }
    }

    // Add any remaining statement
    var last = sb.ToString().Trim();
    if (!string.IsNullOrEmpty(last))
        statements.Add(last);

    return statements;
}
|
||||
|
||||
#region Synchronous Database Access
/// <summary>Executes a command with no parameters and caching disabled.</summary>
public DataTable ExecuteCMD(string Command)
    => _ExecuteCMD(Command, new Dictionary<string, object>(), null, 30, "");

/// <summary>Executes a command with no parameters and the supplied cache options.</summary>
public DataTable ExecuteCMD(string Command, DatabaseMemoryCacheOptions? CacheOptions)
    => _ExecuteCMD(Command, new Dictionary<string, object>(), CacheOptions, 30, "");

/// <summary>Executes a parameterised command with caching disabled.</summary>
public DataTable ExecuteCMD(string Command, Dictionary<string, object> Parameters)
    => _ExecuteCMD(Command, Parameters, null, 30, "");

/// <summary>Executes a parameterised command with the supplied cache options.</summary>
public DataTable ExecuteCMD(string Command, Dictionary<string, object> Parameters, DatabaseMemoryCacheOptions? CacheOptions)
    => _ExecuteCMD(Command, Parameters, CacheOptions, 30, "");

/// <summary>Executes a parameterised command with an explicit timeout and optional connection string override.</summary>
public DataTable ExecuteCMD(string Command, Dictionary<string, object> Parameters, int Timeout = 30, string ConnectionString = "")
    => _ExecuteCMD(Command, Parameters, null, Timeout, ConnectionString);

/// <summary>Executes a parameterised command with full control over caching, timeout, and connection string.</summary>
public DataTable ExecuteCMD(string Command, Dictionary<string, object> Parameters, DatabaseMemoryCacheOptions? CacheOptions, int Timeout = 30, string ConnectionString = "")
    => _ExecuteCMD(Command, Parameters, CacheOptions, Timeout, ConnectionString);

/// <summary>Executes a command with no parameters and caching disabled, returning rows as dictionaries.</summary>
public List<Dictionary<string, object>> ExecuteCMDDict(string Command)
    => _ExecuteCMDDict(Command, new Dictionary<string, object>(), null, 30, "");

/// <summary>Executes a command with no parameters and the supplied cache options, returning rows as dictionaries.</summary>
public List<Dictionary<string, object>> ExecuteCMDDict(string Command, DatabaseMemoryCacheOptions? CacheOptions)
    => _ExecuteCMDDict(Command, new Dictionary<string, object>(), CacheOptions, 30, "");

/// <summary>Executes a parameterised command with caching disabled, returning rows as dictionaries.</summary>
public List<Dictionary<string, object>> ExecuteCMDDict(string Command, Dictionary<string, object> Parameters)
    => _ExecuteCMDDict(Command, Parameters, null, 30, "");

/// <summary>Executes a parameterised command with the supplied cache options, returning rows as dictionaries.</summary>
public List<Dictionary<string, object>> ExecuteCMDDict(string Command, Dictionary<string, object> Parameters, DatabaseMemoryCacheOptions? CacheOptions)
    => _ExecuteCMDDict(Command, Parameters, CacheOptions, 30, "");

/// <summary>Executes a parameterised command with an explicit timeout, returning rows as dictionaries.</summary>
public List<Dictionary<string, object>> ExecuteCMDDict(string Command, Dictionary<string, object> Parameters, int Timeout = 30, string ConnectionString = "")
    => _ExecuteCMDDict(Command, Parameters, null, Timeout, ConnectionString);

/// <summary>Executes a parameterised command with full control over caching, timeout, and connection string, returning rows as dictionaries.</summary>
public List<Dictionary<string, object>> ExecuteCMDDict(string Command, Dictionary<string, object> Parameters, DatabaseMemoryCacheOptions? CacheOptions, int Timeout = 30, string ConnectionString = "")
    => _ExecuteCMDDict(Command, Parameters, CacheOptions, Timeout, ConnectionString);
#endregion Synchronous Database Access
|
||||
|
||||
#region Asynchronous Database Access
// NOTE(review): these overloads are async facades over the synchronous
// _ExecuteCMD/_ExecuteCMDDict - there is no await inside, so they complete
// synchronously on the calling thread and only wrap the result in a Task.

/// <summary>Asynchronously executes a command with no parameters and caching disabled.</summary>
public async Task<DataTable> ExecuteCMDAsync(string Command)
    => _ExecuteCMD(Command, new Dictionary<string, object>(), null, 30, "");

/// <summary>Asynchronously executes a command with no parameters and the supplied cache options.</summary>
public async Task<DataTable> ExecuteCMDAsync(string Command, DatabaseMemoryCacheOptions? CacheOptions)
    => _ExecuteCMD(Command, new Dictionary<string, object>(), CacheOptions, 30, "");

/// <summary>Asynchronously executes a parameterised command with caching disabled.</summary>
public async Task<DataTable> ExecuteCMDAsync(string Command, Dictionary<string, object> Parameters)
    => _ExecuteCMD(Command, Parameters, null, 30, "");

/// <summary>Asynchronously executes a parameterised command with the supplied cache options.</summary>
public async Task<DataTable> ExecuteCMDAsync(string Command, Dictionary<string, object> Parameters, DatabaseMemoryCacheOptions? CacheOptions)
    => _ExecuteCMD(Command, Parameters, CacheOptions, 30, "");

/// <summary>Asynchronously executes a parameterised command with an explicit timeout and optional connection string override.</summary>
public async Task<DataTable> ExecuteCMDAsync(string Command, Dictionary<string, object> Parameters, int Timeout = 30, string ConnectionString = "")
    => _ExecuteCMD(Command, Parameters, null, Timeout, ConnectionString);

/// <summary>Asynchronously executes a parameterised command with full control over caching, timeout, and connection string.</summary>
public async Task<DataTable> ExecuteCMDAsync(string Command, Dictionary<string, object> Parameters, DatabaseMemoryCacheOptions? CacheOptions, int Timeout = 30, string ConnectionString = "")
    => _ExecuteCMD(Command, Parameters, CacheOptions, Timeout, ConnectionString);

/// <summary>Asynchronously executes a command with no parameters and caching disabled, returning rows as dictionaries.</summary>
public async Task<List<Dictionary<string, object>>> ExecuteCMDDictAsync(string Command)
    => _ExecuteCMDDict(Command, new Dictionary<string, object>(), null, 30, "");

/// <summary>Asynchronously executes a command with no parameters and the supplied cache options, returning rows as dictionaries.</summary>
public async Task<List<Dictionary<string, object>>> ExecuteCMDDictAsync(string Command, DatabaseMemoryCacheOptions? CacheOptions)
    => _ExecuteCMDDict(Command, new Dictionary<string, object>(), CacheOptions, 30, "");

/// <summary>Asynchronously executes a parameterised command with caching disabled, returning rows as dictionaries.</summary>
public async Task<List<Dictionary<string, object>>> ExecuteCMDDictAsync(string Command, Dictionary<string, object> Parameters)
    => _ExecuteCMDDict(Command, Parameters, null, 30, "");

/// <summary>Asynchronously executes a parameterised command with the supplied cache options, returning rows as dictionaries.</summary>
public async Task<List<Dictionary<string, object>>> ExecuteCMDDictAsync(string Command, Dictionary<string, object> Parameters, DatabaseMemoryCacheOptions? CacheOptions)
    => _ExecuteCMDDict(Command, Parameters, CacheOptions, 30, "");

/// <summary>Asynchronously executes a parameterised command with an explicit timeout, returning rows as dictionaries.</summary>
public async Task<List<Dictionary<string, object>>> ExecuteCMDDictAsync(string Command, Dictionary<string, object> Parameters, int Timeout = 30, string ConnectionString = "")
    => _ExecuteCMDDict(Command, Parameters, null, Timeout, ConnectionString);

/// <summary>Asynchronously executes a parameterised command with full control over caching, timeout, and connection string, returning rows as dictionaries.</summary>
public async Task<List<Dictionary<string, object>>> ExecuteCMDDictAsync(string Command, Dictionary<string, object> Parameters, DatabaseMemoryCacheOptions? CacheOptions, int Timeout = 30, string ConnectionString = "")
    => _ExecuteCMDDict(Command, Parameters, CacheOptions, Timeout, ConnectionString);
#endregion Asynchronous Database Access
|
||||
|
||||
|
||||
/// <summary>
/// Executes a SQL command and converts the resulting DataTable into a list of
/// dictionaries, one per row, keyed by column name.
/// </summary>
/// <param name="Command">The SQL statement to execute.</param>
/// <param name="Parameters">Named parameters to bind to the command.</param>
/// <param name="CacheOptions">Optional memory-cache settings; null disables caching.</param>
/// <param name="Timeout">Command timeout in seconds.</param>
/// <param name="ConnectionString">Connection string override; "" uses the instance default.</param>
/// <returns>
/// One dictionary per row. Values are the string representation of each cell,
/// or null where the cell was DBNull.
/// </returns>
private List<Dictionary<string, object>> _ExecuteCMDDict(string Command, Dictionary<string, object> Parameters, DatabaseMemoryCacheOptions? CacheOptions, int Timeout = 30, string ConnectionString = "")
{
    DataTable dataTable = _ExecuteCMD(Command, Parameters, CacheOptions, Timeout, ConnectionString);

    // Hoist the column names out of the row loop - the schema is identical for
    // every row, so there is no need to walk dataRow.Table.Columns per row.
    List<string> columnNames = new List<string>();
    foreach (DataColumn column in dataTable.Columns)
    {
        columnNames.Add(column.ColumnName);
    }

    // The element type now matches the declared return type exactly, removing
    // the object?/object nullability-annotation mismatch of the original.
    List<Dictionary<string, object>> rows = new List<Dictionary<string, object>>();
    foreach (DataRow dataRow in dataTable.Rows)
    {
        Dictionary<string, object> row = new Dictionary<string, object>();
        for (int i = 0; i < columnNames.Count; i++)
        {
            if (dataRow[i] == System.DBNull.Value)
            {
                // DBNull is surfaced to callers as a plain null.
                row.Add(columnNames[i], null);
            }
            else
            {
                // All non-null values are stored as their string representation.
                row.Add(columnNames[i], dataRow[i].ToString());
            }
        }
        rows.Add(row);
    }

    return rows;
}
|
||||
|
||||
/// <summary>
/// Core query executor: runs a SQL command against the configured engine and
/// returns the results as a DataTable, with optional in-memory result caching.
/// </summary>
/// <param name="Command">The SQL statement to execute.</param>
/// <param name="Parameters">Named parameters to bind to the command.</param>
/// <param name="CacheOptions">Optional cache settings; null disables caching.</param>
/// <param name="Timeout">Command timeout in seconds.</param>
/// <param name="ConnectionString">Connection string override; "" uses the instance default.</param>
/// <returns>The result set, or an empty DataTable for unsupported engines.</returns>
private DataTable _ExecuteCMD(string Command, Dictionary<string, object> Parameters, DatabaseMemoryCacheOptions? CacheOptions, int Timeout = 30, string ConnectionString = "")
{
    // Default cache key is the command text plus all parameter name=value
    // pairs, so identical queries with identical arguments share an entry.
    string CacheKey = Command + string.Join(";", Parameters.Select(x => string.Join("=", x.Key, x.Value)));
    if (CacheOptions?.CacheKey != null)
    {
        // A caller-supplied cache key takes precedence over the derived one.
        CacheKey = CacheOptions.CacheKey;
    }

    // Serve from the memory cache when caching is enabled and a hit exists.
    if (CacheOptions is object && CacheOptions.CacheEnabled)
    {
        object? CachedData = DatabaseMemoryCache.GetCacheObject(CacheKey);
        if (CachedData is object)
        {
            return (DataTable)CachedData;
        }
    }

    // purge cache if command contains "INSERT", "UPDATE", "DELETE", or "ALTER"
    // NOTE(review): these are substring matches, so a SELECT whose text merely
    // contains one of these words (e.g. a column named "LastUpdate") will also
    // purge the whole cache - confirm this over-purging is acceptable.
    if (
        Command.Contains("INSERT", StringComparison.InvariantCultureIgnoreCase) ||
        Command.Contains("UPDATE", StringComparison.InvariantCultureIgnoreCase) ||
        Command.Contains("DELETE", StringComparison.InvariantCultureIgnoreCase) ||
        Command.Contains("ALTER", StringComparison.InvariantCultureIgnoreCase)
    )
    {
        // exclude logging events from purging the cache
        if (!Command.StartsWith("INSERT INTO SERVERLOGS", StringComparison.InvariantCultureIgnoreCase))
        {
            DatabaseMemoryCache.ClearCache();
        }
    }

    // Fall back to the instance connection string when none was supplied.
    if (ConnectionString == "") { ConnectionString = _ConnectionString; }
    switch (_ConnectorType)
    {
        case databaseType.MySql:
            MySQLServerConnector conn = new MySQLServerConnector(ConnectionString);
            DataTable RetTable = conn.ExecCMD(Command, Parameters, Timeout);
            if (CacheOptions is object && CacheOptions.CacheEnabled)
            {
                // Populate the cache with the fresh result for subsequent calls.
                DatabaseMemoryCache.SetCacheObject(CacheKey, RetTable, CacheOptions.ExpirationSeconds);
            }
            return RetTable;
        default:
            // Unsupported engine: return an empty result set rather than throw.
            return new DataTable();
    }
}
|
||||
|
||||
/// <summary>
/// Executes a non-query SQL statement with no parameters using the default
/// timeout and connection string.
/// </summary>
/// <param name="Command">The SQL statement to execute.</param>
/// <returns>The number of rows affected.</returns>
public int ExecuteNonQuery(string Command)
    => _ExecuteNonQuery(Command, new Dictionary<string, object>(), 30, "");
|
||||
|
||||
/// <summary>
/// Executes a parameterised non-query SQL statement using the default timeout
/// and connection string.
/// </summary>
/// <param name="Command">The SQL statement to execute.</param>
/// <param name="Parameters">Named parameters to bind to the command.</param>
/// <returns>The number of rows affected.</returns>
public int ExecuteNonQuery(string Command, Dictionary<string, object> Parameters)
    => _ExecuteNonQuery(Command, Parameters, 30, "");
|
||||
|
||||
/// <summary>
/// Executes a parameterised non-query SQL statement with an explicit timeout
/// and optional connection-string override.
/// </summary>
/// <param name="Command">The SQL statement to execute.</param>
/// <param name="Parameters">Named parameters to bind to the command.</param>
/// <param name="Timeout">Command timeout in seconds.</param>
/// <param name="ConnectionString">Connection string override; "" uses the default.</param>
/// <returns>The number of rows affected.</returns>
public int ExecuteNonQuery(string Command, Dictionary<string, object> Parameters, int Timeout = 30, string ConnectionString = "")
    => _ExecuteNonQuery(Command, Parameters, Timeout, ConnectionString);
|
||||
|
||||
/// <summary>
/// Runs a non-query statement against the configured database engine and
/// returns the affected-row count.
/// </summary>
/// <param name="Command">The SQL statement to execute.</param>
/// <param name="Parameters">Named parameters to bind to the command.</param>
/// <param name="Timeout">Command timeout in seconds.</param>
/// <param name="ConnectionString">Connection string override; "" uses the instance default.</param>
/// <returns>Rows affected, or 0 for unsupported engines.</returns>
private int _ExecuteNonQuery(string Command, Dictionary<string, object> Parameters, int Timeout = 30, string ConnectionString = "")
{
    // Fall back to the instance connection string when none was supplied.
    string effectiveConnectionString = ConnectionString == "" ? _ConnectionString : ConnectionString;

    switch (_ConnectorType)
    {
        case databaseType.MySql:
            var connector = new MySQLServerConnector(effectiveConnectionString);
            return connector.ExecNonQuery(Command, Parameters, Timeout);
        default:
            // Unsupported engine: report zero rows affected.
            return 0;
    }
}
|
||||
|
||||
/// <summary>
/// Executes a batch of SQL commands inside a single database transaction.
/// </summary>
/// <param name="CommandList">The statements (with parameters) to run as one unit.</param>
/// <param name="Timeout">Per-command timeout in seconds.</param>
public void ExecuteTransactionCMD(List<SQLTransactionItem> CommandList, int Timeout = 60)
{
    switch (_ConnectorType)
    {
        case databaseType.MySql:
            {
                // Translate each transaction item into the dictionary shape the
                // connector expects ("sql" => statement, "values" => parameters).
                var commands = new List<Dictionary<string, object>>();
                foreach (SQLTransactionItem item in CommandList)
                {
                    commands.Add(new Dictionary<string, object>
                    {
                        ["sql"] = item.SQLCommand,
                        ["values"] = item.Parameters
                    });
                }

                var connector = new MySQLServerConnector(_ConnectionString);
                connector.TransactionExecCMD(commands, Timeout);
                break;
            }
    }
}
|
||||
|
||||
/// <summary>
/// Reads the current schema version from the schema_version table.
/// </summary>
/// <returns>
/// The stored schema version, or 0 when the table is empty or the engine is
/// unsupported (callers treat 0 as "not yet initialised").
/// </returns>
public int GetDatabaseSchemaVersion()
{
    switch (_ConnectorType)
    {
        case databaseType.MySql:
            string sql = "SELECT schema_version FROM schema_version;";
            DataTable SchemaVersion = ExecuteCMD(sql);
            if (SchemaVersion.Rows.Count == 0)
            {
                return 0;
            }

            // Convert.ToInt32 instead of a direct unboxing cast: the provider
            // may surface the column as a wider integral type, in which case
            // (int)value would throw InvalidCastException.
            return Convert.ToInt32(SchemaVersion.Rows[0][0]);

        default:
            return 0;
    }
}
|
||||
|
||||
/// <summary>
/// Verifies that a connection can be opened against the configured database.
/// </summary>
/// <returns>True when the database is reachable; otherwise false.</returns>
public bool TestConnection()
{
    if (_ConnectorType == databaseType.MySql)
    {
        return new MySQLServerConnector(_ConnectionString).TestConnection();
    }

    // Unsupported engines are reported as unreachable.
    return false;
}
|
||||
|
||||
/// <summary>
/// A single SQL statement plus its bound parameters, queued for execution
/// inside a database transaction (see ExecuteTransactionCMD).
/// </summary>
public class SQLTransactionItem
{
    // The SQL statement to run.
    public string? SQLCommand;

    // Parameter name/value pairs bound to the statement; starts empty.
    public Dictionary<string, object>? Parameters = new Dictionary<string, object>();

    /// <summary>Creates an empty transaction item.</summary>
    public SQLTransactionItem()
    {
    }

    /// <summary>Creates a transaction item from a statement and its parameters.</summary>
    public SQLTransactionItem(string SQLCommand, Dictionary<string, object> Parameters)
    {
        this.SQLCommand = SQLCommand;
        this.Parameters = Parameters;
    }
}
|
||||
|
||||
/// <summary>
/// Thin wrapper around MySqlConnection providing query, non-query, transaction
/// and connectivity-test helpers for the enclosing Database class.
/// </summary>
private partial class MySQLServerConnector
{
    // Connection string used for every connection this instance opens.
    private string DBConn = "";

    /// <summary>Creates a connector bound to the given connection string.</summary>
    public MySQLServerConnector(string ConnectionString)
    {
        DBConn = ConnectionString;
    }

    /// <summary>
    /// Executes a query and loads the entire result set into a DataTable.
    /// Errors are logged and an empty table is returned rather than thrown
    /// (best-effort contract relied on by _ExecuteCMD).
    /// </summary>
    /// <param name="SQL">The SQL statement to execute.</param>
    /// <param name="Parameters">Named parameters to bind; must not be null.</param>
    /// <param name="Timeout">Command timeout in seconds.</param>
    public DataTable ExecCMD(string SQL, Dictionary<string, object> Parameters, int Timeout)
    {
        DataTable RetTable = new DataTable();

        Logging.LogKey(Logging.LogType.Debug, "process.database", "database.connecting_to_database", null, null, null, true);
        using (MySqlConnection conn = new MySqlConnection(DBConn))
        {
            conn.Open();

            // Shared command construction (was duplicated inline here and in ExecNonQuery).
            MySqlCommand cmd = buildcommand(conn, SQL, Parameters, Timeout);

            try
            {
                Logging.LogKey(Logging.LogType.Debug, "process.database", "database.executing_sql", null, new[] { SQL }, null, true);
                if (Parameters.Count > 0)
                {
                    string dictValues = string.Join(";", Parameters.Select(x => string.Join("=", x.Key, x.Value)));
                    Logging.LogKey(Logging.LogType.Debug, "process.database", "database.parameters", null, new[] { dictValues }, null, true);
                }
                RetTable.Load(cmd.ExecuteReader());
            }
            catch (Exception ex)
            {
                // Deliberate best-effort: log the failure and fall through with
                // whatever (possibly empty) data was loaded.
                Logging.LogKey(Logging.LogType.Critical, "process.database", "database.error_executing_sql", null, new[] { SQL }, ex);
#if DEBUG
                if (Parameters.Count > 0)
                {
                    Logging.LogKey(Logging.LogType.Critical, "process.database", "database.parameters");
                    foreach (string param in Parameters.Keys)
                    {
                        string typeName = Parameters[param]?.GetType().ToString() ?? "unknown";
                        Logging.LogKey(Logging.LogType.Critical, "process.database", param + " = " + Parameters[param] + " (" + typeName + ")");
                    }
                }
#endif
            }

            Logging.LogKey(Logging.LogType.Debug, "process.database", "database.closing_database_connection", null, null, null, true);
            conn.Close();
        }

        return RetTable;
    }

    /// <summary>
    /// Executes a non-query statement and returns the affected-row count.
    /// Errors are logged and 0 is returned rather than thrown.
    /// </summary>
    /// <param name="SQL">The SQL statement to execute.</param>
    /// <param name="Parameters">Named parameters to bind; must not be null.</param>
    /// <param name="Timeout">Command timeout in seconds.</param>
    public int ExecNonQuery(string SQL, Dictionary<string, object> Parameters, int Timeout)
    {
        int result = 0;

        Logging.LogKey(Logging.LogType.Debug, "process.database", "database.connecting_to_database", null, null, null, true);
        using (MySqlConnection conn = new MySqlConnection(DBConn))
        {
            conn.Open();

            MySqlCommand cmd = buildcommand(conn, SQL, Parameters, Timeout);

            try
            {
                Logging.LogKey(Logging.LogType.Debug, "process.database", "database.executing_sql", null, new[] { SQL }, null, true);
                if (Parameters.Count > 0)
                {
                    string dictValues = string.Join(";", Parameters.Select(x => string.Join("=", x.Key, x.Value)));
                    Logging.LogKey(Logging.LogType.Debug, "process.database", "database.parameters", null, new[] { dictValues }, null, true);
                }
                result = cmd.ExecuteNonQuery();
            }
            catch (Exception ex)
            {
                Logging.LogKey(Logging.LogType.Critical, "process.database", "database.error_executing_sql", null, new[] { SQL }, ex);
                Trace.WriteLine("Error executing " + SQL);
                Trace.WriteLine("Full exception: " + ex.ToString());
            }

            Logging.LogKey(Logging.LogType.Debug, "process.database", "database.closing_database_connection", null, null, null, true);
            conn.Close();
        }

        return result;
    }

    /// <summary>
    /// Executes a list of commands inside a single transaction. Each entry maps
    /// "sql" to the statement text and "values" to its parameter dictionary.
    /// On any failure the transaction is rolled back and the exception rethrown,
    /// so no partial batch is ever committed.
    /// </summary>
    /// <param name="Parameters">The command descriptors to run in order.</param>
    /// <param name="Timeout">Per-command timeout in seconds.</param>
    public void TransactionExecCMD(List<Dictionary<string, object>> Parameters, int Timeout)
    {
        using (MySqlConnection conn = new MySqlConnection(DBConn))
        {
            conn.Open();
            MySqlTransaction transaction = conn.BeginTransaction();
            try
            {
                foreach (Dictionary<string, object> Parameter in Parameters)
                {
                    var cmd = buildcommand(conn, Parameter["sql"].ToString(), (Dictionary<string, object>)Parameter["values"], Timeout);
                    cmd.Transaction = transaction;
                    cmd.ExecuteNonQuery();
                }

                transaction.Commit();
            }
            catch
            {
                // Explicitly undo any statements that already ran, then let the
                // caller observe the original failure.
                transaction.Rollback();
                throw;
            }

            conn.Close();
        }
    }

    /// <summary>
    /// Builds a MySqlCommand bound to the given connection with all parameters
    /// attached. A null parameter dictionary is treated as "no parameters".
    /// </summary>
    private MySqlCommand buildcommand(MySqlConnection Conn, string SQL, Dictionary<string, object> Parameters, int Timeout)
    {
        var cmd = new MySqlCommand
        {
            Connection = Conn,
            CommandText = SQL,
            CommandTimeout = Timeout
        };

        if (Parameters is object)
        {
            foreach (string param in Parameters.Keys)
            {
                cmd.Parameters.AddWithValue(param, Parameters[param]);
            }
        }

        return cmd;
    }

    /// <summary>
    /// Attempts to open and close a connection to verify reachability.
    /// </summary>
    /// <returns>True when the connection opens successfully; otherwise false.</returns>
    public bool TestConnection()
    {
        using (MySqlConnection conn = new MySqlConnection(DBConn))
        {
            try
            {
                conn.Open();
                conn.Close();
                return true;
            }
            catch
            {
                // Any failure (bad credentials, unreachable host) means "not connected".
                return false;
            }
        }
    }
}
|
||||
}
|
||||
}
|
||||
|
||||
344
gaseous-lib/Classes/Database/DatabaseBackup.cs
Normal file
344
gaseous-lib/Classes/Database/DatabaseBackup.cs
Normal file
|
|
@ -0,0 +1,344 @@
|
|||
using System;
|
||||
using System.Diagnostics;
|
||||
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
/// <summary>
/// Provides database backup and restore capability using the configured engine's
/// command-line tools (mariadb-dump/mysqldump or pg_dump/psql).
/// A backup is taken before any migration steps run. If migration fails,
/// restore instructions (including the exact restore command) are written
/// to the log so that support can recover the database.
/// </summary>
public static class DatabaseBackup
{
    /// <summary>
    /// Identifies the database client family used for backup and restore.
    /// </summary>
    private enum BackupProvider
    {
        /// <summary>MariaDB and MySQL compatible command-line tools.</summary>
        MySqlLike,
        /// <summary>PostgreSQL command-line tools.</summary>
        PostgreSql
    }

    /// <summary>
    /// Generates a timestamped backup file path under the library backup directory.
    /// Example: /path/to/library/Backups/gaseous-backup-20260411-140035.sql
    /// </summary>
    public static string GenerateBackupPath()
    {
        string backupDir = Path.Combine(Config.LibraryConfiguration.LibraryRootDirectory, "Backups");
        if (!Directory.Exists(backupDir))
        {
            Directory.CreateDirectory(backupDir);
        }

        // UTC keeps the names sortable and avoids local-time ambiguity.
        string timestamp = DateTime.UtcNow.ToString("yyyyMMdd-HHmmss");
        return Path.Combine(backupDir, $"gaseous-backup-{timestamp}.sql");
    }

    /// <summary>
    /// Executes a full dump of the configured database to <paramref name="backupFilePath"/>
    /// using the configured database engine's backup tool. Throws
    /// <see cref="DatabaseBackupException"/> if the dump fails so the caller can abort
    /// the migration safely.
    /// </summary>
    /// <param name="backupFilePath">Absolute path where the .sql dump will be written.</param>
    public static void Backup(string backupFilePath)
    {
        Logging.LogKey(Logging.LogType.Information, "process.database", "database.backup_starting",
            null, new[] { backupFilePath });

        var provider = ResolveProvider();
        var psi = BuildBackupProcessStartInfo(provider, backupFilePath);

        using var process = Process.Start(psi)
            ?? throw new DatabaseBackupException($"Failed to start backup process '{psi.FileName}'.");

        // Drain stderr before WaitForExit so the child cannot block on a full pipe.
        string stderr = process.StandardError.ReadToEnd();
        process.WaitForExit();

        if (process.ExitCode != 0)
        {
            throw new DatabaseBackupException(
                $"Backup command '{psi.FileName}' exited with code {process.ExitCode}. stderr: {stderr}");
        }

        // A missing or zero-byte dump means the tool ran but produced nothing usable.
        var fileInfo = new FileInfo(backupFilePath);
        if (!fileInfo.Exists || fileInfo.Length == 0)
        {
            // Report the tool that actually ran (mariadb-dump, mysqldump or pg_dump)
            // instead of hard-coding "mysqldump".
            throw new DatabaseBackupException(
                $"'{psi.FileName}' completed but backup file is empty or missing: {backupFilePath}");
        }

        Logging.LogKey(Logging.LogType.Information, "process.database", "database.backup_complete",
            null, new[] { backupFilePath, fileInfo.Length.ToString() });
    }

    /// <summary>
    /// Writes a clearly formatted, copy-pasteable restore command to the log.
    /// Called immediately after a migration failure so that operators have
    /// everything they need to recover without searching documentation.
    /// </summary>
    /// <param name="backupFilePath">The backup file produced before migration began.</param>
    /// <param name="failedVersion">The schema version that failed.</param>
    /// <param name="failureReason">Short description of what went wrong.</param>
    public static void LogRestoreInstructions(string backupFilePath, int failedVersion, string failureReason)
    {
        var provider = ResolveProvider();
        string restoreCommand = BuildRestoreCommand(provider, backupFilePath);

        Logging.LogKey(Logging.LogType.Critical, "process.database",
            "database.migration_failed_restore_instructions",
            null, new[]
            {
                failedVersion.ToString(),
                failureReason,
                backupFilePath,
                restoreCommand
            });

        // Also write directly to console so it is always visible regardless of
        // log-level filtering or log-sink failures during critical failure paths.
        Console.Error.WriteLine();
        Console.Error.WriteLine("=============================================================");
        Console.Error.WriteLine(" DATABASE MIGRATION FAILED");
        Console.Error.WriteLine("=============================================================");
        Console.Error.WriteLine($" Failed at schema version : {failedVersion}");
        Console.Error.WriteLine($" Reason : {failureReason}");
        Console.Error.WriteLine($" Backup file : {backupFilePath}");
        Console.Error.WriteLine();
        Console.Error.WriteLine(" To restore your database, run:");
        Console.Error.WriteLine($" {restoreCommand}");
        Console.Error.WriteLine();
        Console.Error.WriteLine(" Or using the Gaseous CLI:");
        Console.Error.WriteLine($" gaseous-cli db restore \"{backupFilePath}\"");
        Console.Error.WriteLine("=============================================================");
        Console.Error.WriteLine();
    }

    /// <summary>
    /// Restores a database from a previously created SQL backup file.
    /// Intended for use by the CLI restore command.
    /// </summary>
    /// <param name="backupFilePath">Path to the .sql dump file to restore.</param>
    public static void Restore(string backupFilePath)
    {
        if (!File.Exists(backupFilePath))
        {
            throw new FileNotFoundException($"Backup file not found: {backupFilePath}");
        }

        Logging.LogKey(Logging.LogType.Warning, "process.database", "database.restore_starting",
            null, new[] { backupFilePath });

        var provider = ResolveProvider();
        var psi = BuildRestoreProcessStartInfo(provider);

        using var process = Process.Start(psi)
            ?? throw new DatabaseBackupException($"Failed to start restore process '{psi.FileName}'.");

        // Stream the dump into the client's stdin, then close it so the client
        // knows the input is complete.
        using (var fileStream = File.OpenRead(backupFilePath))
        {
            fileStream.CopyTo(process.StandardInput.BaseStream);
        }
        process.StandardInput.Close();

        string stderr = process.StandardError.ReadToEnd();
        process.WaitForExit();

        if (process.ExitCode != 0)
        {
            throw new DatabaseBackupException(
                $"Restore command '{psi.FileName}' exited with code {process.ExitCode}. stderr: {stderr}");
        }

        Logging.LogKey(Logging.LogType.Information, "process.database", "database.restore_complete",
            null, new[] { backupFilePath });
    }

    /// <summary>
    /// Chooses the backup provider from the configured database engine.
    /// </summary>
    private static BackupProvider ResolveProvider()
    {
        string engine = (Config.DatabaseConfiguration.DatabaseEngine ?? "mysql")
            .Trim()
            .ToLowerInvariant();

        if (engine == "postgres" || engine == "postgresql" || engine == "pg")
        {
            return BackupProvider.PostgreSql;
        }

        // Treat mysql and mariadb as MySQL protocol compatible engines.
        return BackupProvider.MySqlLike;
    }

    /// <summary>
    /// Builds the process start information used to create a backup.
    /// Credentials are passed via environment variables (PGPASSWORD / MYSQL_PWD)
    /// so they never appear on the command line.
    /// </summary>
    private static ProcessStartInfo BuildBackupProcessStartInfo(BackupProvider provider, string backupFilePath)
    {
        var cfg = Config.DatabaseConfiguration;

        switch (provider)
        {
            case BackupProvider.PostgreSql:
                {
                    string command = ResolveRequiredCommand(new[] { "pg_dump" });
                    var psi = new ProcessStartInfo
                    {
                        FileName = command,
                        Arguments = $"--host={cfg.HostName} --port={cfg.Port} --username={cfg.UserName} --format=plain --file=\"{backupFilePath}\" {cfg.DatabaseName}",
                        RedirectStandardError = true,
                        UseShellExecute = false,
                        CreateNoWindow = true
                    };
                    psi.Environment["PGPASSWORD"] = cfg.Password;
                    return psi;
                }

            default:
                {
                    // Prefer mariadb-dump when available, then fall back to mysqldump.
                    string command = ResolveRequiredCommand(new[] { "mariadb-dump", "mysqldump" });
                    var psi = new ProcessStartInfo
                    {
                        FileName = command,
                        Arguments = $"--host={cfg.HostName} --port={cfg.Port} --user={cfg.UserName} --single-transaction --routines --triggers --events --result-file=\"{backupFilePath}\" {cfg.DatabaseName}",
                        RedirectStandardError = true,
                        UseShellExecute = false,
                        CreateNoWindow = true
                    };
                    psi.Environment["MYSQL_PWD"] = cfg.Password;
                    return psi;
                }
        }
    }

    /// <summary>
    /// Builds the process start information used to restore a backup
    /// (the dump is streamed to the client's stdin by Restore).
    /// </summary>
    private static ProcessStartInfo BuildRestoreProcessStartInfo(BackupProvider provider)
    {
        var cfg = Config.DatabaseConfiguration;

        switch (provider)
        {
            case BackupProvider.PostgreSql:
                {
                    string command = ResolveRequiredCommand(new[] { "psql" });
                    var psi = new ProcessStartInfo
                    {
                        FileName = command,
                        Arguments = $"--host={cfg.HostName} --port={cfg.Port} --username={cfg.UserName} --dbname={cfg.DatabaseName}",
                        RedirectStandardInput = true,
                        RedirectStandardError = true,
                        UseShellExecute = false,
                        CreateNoWindow = true
                    };
                    psi.Environment["PGPASSWORD"] = cfg.Password;
                    return psi;
                }

            default:
                {
                    string command = ResolveRequiredCommand(new[] { "mariadb", "mysql" });
                    var psi = new ProcessStartInfo
                    {
                        FileName = command,
                        Arguments = $"--host={cfg.HostName} --port={cfg.Port} --user={cfg.UserName} {cfg.DatabaseName}",
                        RedirectStandardInput = true,
                        RedirectStandardError = true,
                        UseShellExecute = false,
                        CreateNoWindow = true
                    };
                    psi.Environment["MYSQL_PWD"] = cfg.Password;
                    return psi;
                }
        }
    }

    /// <summary>
    /// Builds a shell command string that operators can use to restore a backup manually.
    /// </summary>
    private static string BuildRestoreCommand(BackupProvider provider, string backupFilePath)
    {
        var cfg = Config.DatabaseConfiguration;

        switch (provider)
        {
            case BackupProvider.PostgreSql:
                return $"PGPASSWORD=<YOUR_PASSWORD> psql --host={cfg.HostName} --port={cfg.Port} --username={cfg.UserName} --dbname={cfg.DatabaseName} < \"{backupFilePath}\"";

            default:
                return $"MYSQL_PWD=<YOUR_PASSWORD> mariadb --host={cfg.HostName} --port={cfg.Port} --user={cfg.UserName} {cfg.DatabaseName} < \"{backupFilePath}\" # or mysql";
        }
    }

    /// <summary>
    /// Returns the first available command from the supplied candidate list.
    /// Throws <see cref="DatabaseBackupException"/> when none is found.
    /// </summary>
    private static string ResolveRequiredCommand(string[] candidates)
    {
        foreach (string candidate in candidates)
        {
            if (IsCommandAvailable(candidate))
            {
                return candidate;
            }
        }

        throw new DatabaseBackupException(
            "No suitable backup/restore command was found on PATH. Tried: " +
            string.Join(", ", candidates) +
            ". Install one of these tools or adjust your DatabaseEngine setting.");
    }

    /// <summary>
    /// Checks whether a command is present on the current PATH.
    /// </summary>
    private static bool IsCommandAvailable(string command)
    {
        string? pathEnv = Environment.GetEnvironmentVariable("PATH");
        if (string.IsNullOrWhiteSpace(pathEnv)) return false;

        string[] pathEntries = pathEnv.Split(Path.PathSeparator, StringSplitOptions.RemoveEmptyEntries);
        foreach (string dir in pathEntries)
        {
            string fullPath = Path.Combine(dir, command);
            if (File.Exists(fullPath)) return true;

            // Windows compatibility for future local builds
            if (OperatingSystem.IsWindows())
            {
                string exePath = Path.Combine(dir, command + ".exe");
                if (File.Exists(exePath)) return true;
            }
        }

        return false;
    }
}
|
||||
|
||||
/// <summary>
/// Thrown when a database backup or restore operation fails and the current
/// migration must be aborted.
/// </summary>
public class DatabaseBackupException : Exception
{
    /// <summary>
    /// Creates a backup exception with a descriptive error message and the
    /// underlying cause.
    /// </summary>
    public DatabaseBackupException(string message, Exception inner) : base(message, inner) { }

    /// <summary>
    /// Creates a backup exception with a descriptive error message.
    /// </summary>
    public DatabaseBackupException(string message) : base(message) { }
}
|
||||
}
|
||||
878
gaseous-lib/Classes/Database/DatabaseMigration.cs
Normal file
878
gaseous-lib/Classes/Database/DatabaseMigration.cs
Normal file
|
|
@ -0,0 +1,878 @@
|
|||
using System;
|
||||
using System.Data;
|
||||
using System.Reflection;
|
||||
using System.Reflection.Metadata;
|
||||
using System.Threading.Tasks;
|
||||
using gaseous_server.Classes.Metadata;
|
||||
using gaseous_server.Classes.Plugins.MetadataProviders;
|
||||
using gaseous_server.Models;
|
||||
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
public static class DatabaseMigration
|
||||
{
|
||||
// Schema versions whose upgrade work can run as a background task after
// startup instead of blocking the migration (e.g. PostUpgradeScript adds 1002).
public static List<int> BackgroundUpgradeTargetSchemaVersions = new List<int>();
|
||||
|
||||
/// <summary>
/// Inserts <paramref name="key"/> into <paramref name="dict"/> or overwrites its
/// current value. Migration steps reuse one parameter dictionary across steps,
/// so plain Dictionary.Add would throw on a repeated key - use this instead.
/// </summary>
/// <param name="dict">The parameter dictionary to update.</param>
/// <param name="key">The parameter name.</param>
/// <param name="value">The value to store.</param>
public static void AddOrSet(Dictionary<string, object> dict, string key, object value)
{
    // The indexer is add-or-replace, unlike Dictionary.Add.
    dict[key] = value;
}
|
||||
|
||||
/// <summary>
/// Runs engine-specific preparation work that must complete BEFORE the schema
/// script for <paramref name="TargetSchemaVersion"/> is applied (table renames,
/// column drops, one-off fix scripts). Steps are strictly order-dependent.
/// </summary>
/// <param name="TargetSchemaVersion">The schema version about to be applied.</param>
/// <param name="DatabaseType">The database engine; only MySql has pre-upgrade steps.</param>
public static async Task PreUpgradeScript(int TargetSchemaVersion, Database.databaseType? DatabaseType)
{
    // load resources
    var assembly = Assembly.GetExecutingAssembly();

    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
    string sql = "";
    Dictionary<string, object> dbDict = new Dictionary<string, object>();
    DataTable data;

    Logging.LogKey(Logging.LogType.Information, "process.database", "database.checking_pre_upgrade_for_schema_version", null, new[] { TargetSchemaVersion.ToString() });

    switch (DatabaseType)
    {
        case Database.databaseType.MySql:
            switch (TargetSchemaVersion)
            {
                case 1005:
                    Logging.LogKey(Logging.LogType.Information, "process.database", "database.running_pre_upgrade_for_schema_version", null, new[] { TargetSchemaVersion.ToString() });

                    // there was a mistake at dbschema version 1004-1005
                    // the first preview release of v1.7 reused dbschema version 1004
                    // if table "Relation_Game_AgeRatings" exists - then we need to apply the gaseous-fix-1005.sql script before applying the standard 1005 script
                    // NOTE(review): the branch below runs when the table is MISSING
                    // (Rows.Count == 0), which is the opposite of the comment above -
                    // confirm the intended polarity.
                    sql = "SELECT table_name FROM information_schema.tables WHERE table_schema = @dbname AND table_name = @tablename;";
                    dbDict.Add("dbname", Config.DatabaseConfiguration.DatabaseName);
                    dbDict.Add("tablename", "Relation_Game_AgeRatings");
                    data = await db.ExecuteCMDAsync(sql, dbDict);
                    if (data.Rows.Count == 0)
                    {
                        Logging.LogKey(Logging.LogType.Information, "process.database", "database.schema_version_requires_missing_table", null, new[] { TargetSchemaVersion.ToString() });

                        string resourceName = "gaseous_lib.Support.Database.MySQL.gaseous-fix-1005.sql";
                        string dbScript = "";

                        // The fix script ships as an embedded resource; skip silently if absent.
                        string[] resources = Assembly.GetExecutingAssembly().GetManifestResourceNames();
                        if (resources.Contains(resourceName))
                        {
                            using (Stream stream = assembly.GetManifestResourceStream(resourceName))
                            using (StreamReader reader = new StreamReader(stream))
                            {
                                dbScript = await reader.ReadToEndAsync();

                                // apply schema!
                                // Extended 180 second timeout: the fix script can be long-running.
                                Logging.LogKey(Logging.LogType.Information, "process.database", "database.applying_schema_version_fix_prior_to", null, new[] { "1005" });
                                await db.ExecuteCMDAsync(dbScript, dbDict, 180);
                            }
                        }
                    }
                    break;

                case 1027:
                    Logging.LogKey(Logging.LogType.Information, "process.database", "database.running_pre_upgrade_for_schema_version", null, new[] { TargetSchemaVersion.ToString() });
                    // create the basic relation tables
                    // this is a blocking task
                    await Storage.CreateRelationsTables<IGDB.Models.Game>();
                    await Storage.CreateRelationsTables<IGDB.Models.Platform>();

                    // drop source id from all metadata tables if it exists
                    var tablesToDropSourceId = new List<string>
                    {
                        "AgeGroup","AgeRating","AgeRatingContentDescription","AlternativeName","Artwork","Collection","Company","CompanyLogo","Cover","ExternalGame","Franchise","Game","GameMode","GameVideo","Genre","InvolvedCompany","MultiplayerMode","Platform","PlatformLogo","PlatformVersion","PlayerPerspective","ReleaseDate","Screenshot","Theme","GameLocalization","Region"
                    };
                    foreach (var table in tablesToDropSourceId)
                    {
                        // check if the column exists
                        sql = $"SELECT * FROM information_schema.COLUMNS WHERE TABLE_SCHEMA = '{Config.DatabaseConfiguration.DatabaseName}' AND TABLE_NAME = '{table}' AND COLUMN_NAME = 'SourceId';";
                        dbDict.Clear();
                        data = await db.ExecuteCMDAsync(sql, dbDict);
                        if (data.Rows.Count > 0)
                        {
                            // column exists, drop it
                            sql = $"ALTER TABLE {table} DROP COLUMN SourceId;"; // MySQL does not support IF EXISTS in ALTER TABLE
                            await db.ExecuteCMDAsync(sql, dbDict);
                            Logging.LogKey(Logging.LogType.Information, "process.database", "database.dropped_sourceid_column_from_table", null, new[] { table });
                        }

                        switch (table)
                        {
                            case "ReleaseDate":
                                // check if month and/or year columns exist
                                // ReleaseDate also carries legacy Month/Year columns that must be
                                // replaced by renaming the short-named `m`/`y` columns.
                                sql = $"SELECT * FROM information_schema.COLUMNS WHERE TABLE_SCHEMA = '{Config.DatabaseConfiguration.DatabaseName}' AND TABLE_NAME = '{table}' AND COLUMN_NAME IN ('Month', 'Year');";
                                data = await db.ExecuteCMDAsync(sql, dbDict);
                                foreach (DataRow row in data.Rows)
                                {
                                    sql = "";
                                    if (row["COLUMN_NAME"].ToString() == "Month")
                                    {
                                        sql += "ALTER TABLE ReleaseDate DROP COLUMN Month, CHANGE `m` `Month` int(11) DEFAULT NULL;";
                                    }
                                    if (row["COLUMN_NAME"].ToString() == "Year")
                                    {
                                        sql += "ALTER TABLE ReleaseDate DROP COLUMN Year, CHANGE `y` `Year` int(11) DEFAULT NULL;";
                                    }
                                    if (!string.IsNullOrEmpty(sql))
                                    {
                                        await db.ExecuteCMDAsync(sql, dbDict);
                                        Logging.LogKey(Logging.LogType.Information, "process.database", "database.dropped_column_from_releasedate_table", null, new[] { row["COLUMN_NAME"].ToString() ?? "" });
                                    }
                                }
                                break;
                        }
                    }
                    break;

                case 1031:
                    Logging.LogKey(Logging.LogType.Information, "process.database", "database.running_pre_upgrade_for_schema_version", null, new[] { TargetSchemaVersion.ToString() });
                    // build tables for metadata storage
                    TableBuilder_1031.BuildTables_1031();
                    sql = "RENAME TABLE AgeGroup TO Metadata_AgeGroup; RENAME TABLE ClearLogo TO Metadata_ClearLogo;";
                    dbDict.Clear();
                    await db.ExecuteCMDAsync(sql, dbDict);
                    break;

                case 1035:
                    Logging.LogKey(Logging.LogType.Information, "process.database", "database.running_pre_upgrade_for_schema_version", null, new[] { TargetSchemaVersion.ToString() });

                    // ensure that the relation tables for games and platforms are built before we attempt to update the database schema
                    await Storage.CreateRelationsTables<IGDB.Models.Game>();

                    break;
            }
            break;
    }
}
|
||||
|
||||
/// <summary>
/// Runs data-fixup steps after the schema upgrade for <paramref name="TargetSchemaVersion"/>
/// has been applied. Only MySQL is handled; other database types fall through untouched.
/// </summary>
/// <param name="TargetSchemaVersion">The schema version whose post-upgrade work should run.</param>
/// <param name="DatabaseType">The database engine in use; only MySql has post-upgrade steps.</param>
public static void PostUpgradeScript(int TargetSchemaVersion, Database.databaseType? DatabaseType)
{
    var assembly = Assembly.GetExecutingAssembly();

    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
    string sql = "";
    Dictionary<string, object> dbDict = new Dictionary<string, object>();
    DataTable data;

    Logging.LogKey(Logging.LogType.Information, "process.database",
        "database.running_post_upgrade_for_schema_version", null, new[] { TargetSchemaVersion.ToString() });

    switch (DatabaseType)
    {
        case Database.databaseType.MySql:
            switch (TargetSchemaVersion)
            {
                case 1002:
                    // this is a safe background task
                    BackgroundUpgradeTargetSchemaVersions.Add(1002);
                    break;

                case 1004:
                    // needs to run on start up

                    // copy root path to new libraries format
                    string oldRoot = Path.Combine(Config.LibraryConfiguration.LibraryRootDirectory, "Library");
                    sql = "INSERT INTO GameLibraries (Name, Path, DefaultLibrary, DefaultPlatform) VALUES (@name, @path, @defaultlibrary, @defaultplatform); SELECT CAST(LAST_INSERT_ID() AS SIGNED);";
                    AddOrSet(dbDict, "name", "Default");
                    AddOrSet(dbDict, "path", oldRoot);
                    AddOrSet(dbDict, "defaultlibrary", 1);
                    AddOrSet(dbDict, "defaultplatform", 0);
                    data = db.ExecuteCMD(sql, dbDict);

                    // apply the new library id to the existing roms
                    sql = "UPDATE Games_Roms SET LibraryId=@libraryid;";
                    dbDict.Clear();
                    AddOrSet(dbDict, "libraryid", data.Rows[0][0]);
                    db.ExecuteCMD(sql, dbDict);
                    break;

                case 1016:
                    // delete old format LastRun_* settings from settings table
                    sql = "DELETE FROM Settings WHERE Setting LIKE 'LastRun_%';";
                    db.ExecuteNonQuery(sql);
                    break;

                case 1023:
                    // load country list
                    Logging.LogKey(Logging.LogType.Information, "process.database", "database.adding_country_lookup_table_contents");
                    ImportCodeValueLookup(db, assembly, "gaseous_lib.Support.Country.txt", "Country");

                    // load language list
                    Logging.LogKey(Logging.LogType.Information, "process.database", "database.adding_language_lookup_table_contents");
                    ImportCodeValueLookup(db, assembly, "gaseous_lib.Support.Language.txt", "Language");
                    break;

                case 1024:
                    // attempt to re-import signature dats

                    // delete existing signature sources to allow re-import
                    Logging.LogKey(Logging.LogType.Information, "process.database", "database.deleting_existing_signature_sources");
                    sql = "DELETE FROM Signatures_Sources;";
                    db.ExecuteNonQuery(sql);

                    // NOTE(review): fire-and-forget async call — any exception it throws is
                    // unobserved. Kept as-is to preserve startup behavior; confirm this is intended.
                    _ = MySql_1024_MigrateMetadataVersion();

                    break;

                case 1027:
                    // create profiles for all existing users
                    sql = "SELECT * FROM Users;";
                    data = db.ExecuteCMD(sql);
                    foreach (DataRow row in data.Rows)
                    {
                        // get legacy avatar from UserAvatars table
                        sql = "SELECT Avatar FROM UserAvatars WHERE UserId = @userid;";
                        dbDict = new Dictionary<string, object>
                        {
                            { "userid", row["Id"] }
                        };
                        DataTable avatarData = db.ExecuteCMD(sql, dbDict);

                        sql = "INSERT INTO UserProfiles (Id, UserId, DisplayName, Quip, Avatar, AvatarExtension, UnstructuredData) VALUES (@id, @userid, @displayname, @quip, @avatar, @avatarextension, @data);";
                        dbDict = new Dictionary<string, object>
                        {
                            { "id", Guid.NewGuid() },
                            { "userid", row["Id"] },
                            { "displayname", row["Email"] },
                            { "quip", "" },
                            { "avatar", avatarData.Rows.Count > 0 ? avatarData.Rows[0]["Avatar"] : null },
                            { "avatarextension", avatarData.Rows.Count > 0 ? ".jpg" : null },
                            { "data", "{}" }
                        };
                        db.ExecuteNonQuery(sql, dbDict);
                    }

                    // update all rom paths to use the new format
                    sql = "SELECT * FROM GameLibraries;";
                    data = db.ExecuteCMD(sql);
                    foreach (DataRow row in data.Rows)
                    {
                        sql = "SELECT * FROM Games_Roms WHERE LibraryId = @libraryid;";
                        dbDict = new Dictionary<string, object>
                        {
                            { "libraryid", row["Id"] }
                        };
                        DataTable romData = db.ExecuteCMD(sql, dbDict);

                        // normalise the library root so StartsWith/Substring below line up
                        string libraryRootPath = (string)row["Path"];
                        if (libraryRootPath.EndsWith(Path.DirectorySeparatorChar.ToString()) == false)
                        {
                            libraryRootPath += Path.DirectorySeparatorChar;
                        }

                        // the default library uses the trailing platform/game/romfile triple;
                        // other libraries are stored relative to their root path
                        bool GetLastThreeElements = false;
                        if ((int)row["DefaultLibrary"] == 1)
                        {
                            GetLastThreeElements = true;
                        }

                        foreach (DataRow romRow in romData.Rows)
                        {
                            string existingPath = (string)romRow["RelativePath"];
                            string newPath = "";

                            if (GetLastThreeElements == true)
                            {
                                // strip all but the last 3 elements from existingPath separated by directory separator
                                // this mode only works for the default library
                                string[] pathParts = existingPath.Split(Path.DirectorySeparatorChar);
                                if (pathParts.Length > 3)
                                {
                                    newPath = Path.Combine(pathParts[pathParts.Length - 3], pathParts[pathParts.Length - 2], pathParts[pathParts.Length - 1]);
                                }
                                else
                                {
                                    // Path does not have the expected 3-segment structure
                                    // (platform/game/romfile). Using as-is; this may indicate
                                    // a ROM that was placed outside the managed library structure.
                                    if (pathParts.Length != 3)
                                    {
                                        Logging.LogKey(Logging.LogType.Warning, "process.database",
                                            "database.rom_path_unexpected_segment_count",
                                            null, new[] { existingPath, pathParts.Length.ToString() });
                                    }
                                    newPath = existingPath;
                                }
                            }
                            else
                            {
                                // strip the library root path from the existing path
                                if (existingPath.StartsWith(libraryRootPath))
                                {
                                    newPath = existingPath.Substring(libraryRootPath.Length);
                                }
                                else
                                {
                                    newPath = existingPath;
                                }
                            }

                            Logging.LogKey(Logging.LogType.Information, "process.database", "database.updating_rom_path_from_to", null, new[] { existingPath, newPath });

                            sql = "UPDATE Games_Roms SET RelativePath = @newpath WHERE Id = @id;";
                            dbDict = new Dictionary<string, object>
                            {
                                { "newpath", newPath },
                                { "id", romRow["Id"] }
                            };
                            db.ExecuteNonQuery(sql, dbDict);
                        }
                    }

                    // get all tables that have the prefix "Relation_" and drop them
                    sql = "SELECT table_name FROM information_schema.tables WHERE table_schema = @dbname AND table_name LIKE 'Relation_%';";
                    dbDict = new Dictionary<string, object>
                    {
                        { "dbname", Config.DatabaseConfiguration.DatabaseName }
                    };
                    data = db.ExecuteCMD(sql, dbDict);
                    foreach (DataRow row in data.Rows)
                    {
                        sql = "DROP TABLE " + (string)row["table_name"] + ";";
                        db.ExecuteNonQuery(sql);
                    }

                    // migrating metadata is a safe background task
                    // NOTE(review): this enqueues 1024 (not 1027) — presumably to re-run the
                    // 1024 metadata migration after the path rewrite above; confirm intent.
                    BackgroundUpgradeTargetSchemaVersions.Add(1024);
                    break;

                case 1031:
                    // update Metadata_Platform SourceId to 0
                    sql = "UPDATE Metadata_Platform SET SourceId = 0;";
                    db.ExecuteNonQuery(sql);

                    // update Games_Roms to MetadataId = 0
                    sql = "UPDATE Games_Roms SET GameId = 0;";
                    db.ExecuteNonQuery(sql);

                    DatabaseMigration.BackgroundUpgradeTargetSchemaVersions.Add(1031);
                    break;

                case 1038:
                    MySql_1038_MigrateDateSettings();
                    break;
            }
            break;
    }
}

/// <summary>
/// Streams a pipe-delimited "Code|Value" embedded resource into the named lookup table.
/// Replaces the previous do/while pattern, which called ReadLine() before checking
/// EndOfStream and so threw a NullReferenceException for an empty (or missing) resource.
/// Called only with trusted constant table names ("Country", "Language").
/// </summary>
/// <param name="db">Open database wrapper to execute inserts against.</param>
/// <param name="assembly">Assembly containing the embedded resource.</param>
/// <param name="resourceName">Fully-qualified manifest resource name.</param>
/// <param name="tableName">Target lookup table with Code/Value columns.</param>
private static void ImportCodeValueLookup(Database db, Assembly assembly, string resourceName, string tableName)
{
    using (Stream stream = assembly.GetManifestResourceStream(resourceName))
    {
        if (stream == null)
        {
            // resource not embedded in this build; nothing to import
            return;
        }

        using (StreamReader reader = new StreamReader(stream))
        {
            while (reader.EndOfStream == false)
            {
                string rawLine = reader.ReadLine();
                if (string.IsNullOrWhiteSpace(rawLine))
                {
                    continue;
                }

                string[] line = rawLine.Split("|");
                if (line.Length < 2)
                {
                    // malformed row; skip rather than abort the whole import
                    continue;
                }

                string sql = $"INSERT INTO {tableName} (Code, Value) VALUES (@code, @value);";
                Dictionary<string, object> dbDict = new Dictionary<string, object>{
                    { "code", line[0] },
                    { "value", line[1] }
                };
                db.ExecuteNonQuery(sql, dbDict);
            }
        }
    }
}
|
||||
|
||||
/// <summary>
/// Schema 1038 cleanup: removes legacy date-tracking rows (ValueType 0) from the
/// Settings table — the named Last* markers plus every LastRun_* entry — and logs
/// how many rows were deleted.
/// </summary>
public static void MySql_1038_MigrateDateSettings()
{
    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);

    string sql = @"DELETE FROM Settings
        WHERE ValueType = 0
        AND (
            `Setting` IN ('LastContentChange', 'LastLibraryChange', 'LastMetadataChange', 'LastMetadataRefresh')
            OR `Setting` LIKE 'LastRun_%'
        );";

    int removedRows = db.ExecuteNonQuery(sql);

    Logging.LogKey(Logging.LogType.Information, "process.database", "database.migrated_legacy_date_settings_total", null, new[] { removedRows.ToString() });
}
|
||||
|
||||
/// <summary>
/// Executes the deferred per-schema-version migrations that were queued during
/// PostUpgradeScript, then rebuilds the generic metadata tables. A failure in one
/// version's task is logged as a warning and does not stop the remaining tasks.
/// </summary>
public static async Task UpgradeScriptBackgroundTasks()
{
    Logging.LogKey(Logging.LogType.Information, "process.database", "database.starting_background_upgrade_tasks");

    foreach (int schemaVersion in BackgroundUpgradeTargetSchemaVersions)
    {
        try
        {
            if (schemaVersion == 1002)
            {
                MySql_1002_MigrateMetadataVersion();
            }
            else if (schemaVersion == 1031)
            {
                await MySql_1031_MigrateMetadataVersion();
            }
        }
        catch (Exception ex)
        {
            // best-effort: log and carry on with the next queued version
            Logging.LogKey(Logging.LogType.Warning, "process.database", "database.error_during_background_upgrade_for_schema_version", null, new[] { schemaVersion.ToString() }, ex);
        }
    }

    // perform any metadata table migrations that are needed
    await gaseous_server.Classes.Metadata.Utility.MetadataTableBuilder.BuildTableFromType("gaseous", "Metadata", typeof(gaseous_server.Classes.Plugins.MetadataProviders.MetadataTypes.Game), "", "NameThe, AgeGroupId");
    await gaseous_server.Classes.Metadata.Utility.MetadataTableBuilder.BuildTableFromType("gaseous", "Metadata", typeof(gaseous_server.Classes.Plugins.MetadataProviders.MetadataTypes.GameLocalization), "", "NameThe");
}
|
||||
|
||||
/// <summary>
/// Schema 1002 background migration: upgrades Signatures_Roms rows from ingestor
/// version 1 to 2 by converting the legacy JSON "Flags" string list into a list of
/// key/value "Attributes" pairs, then rewriting each row. Progress is logged every
/// 100 rows and once at completion.
/// </summary>
public static void MySql_1002_MigrateMetadataVersion()
{
    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
    string sql = "";
    Dictionary<string, object> dbDict = new Dictionary<string, object>();

    // update signature roms to v2
    sql = "SELECT Id, Flags, Attributes, IngestorVersion FROM Signatures_Roms WHERE IngestorVersion = 1";
    DataTable data = db.ExecuteCMD(sql);
    if (data.Rows.Count > 0)
    {
        Logging.LogKey(Logging.LogType.Information, "process.signature_ingest", "database.update_updating_database_entries_total", null, new[] { data.Rows.Count.ToString() });
        int Counter = 0;
        int LastCounterCheck = 0;
        foreach (DataRow row in data.Rows)
        {
            // "flags" column access is case-insensitive on DataRow, so this matches the
            // selected "Flags" column; a NULL cell is treated as an empty JSON array.
            List<string> Flags = Newtonsoft.Json.JsonConvert.DeserializeObject<List<string>>((string)Common.ReturnValueIfNull(row["flags"], "[]"));
            List<KeyValuePair<string, object>> Attributes = new List<KeyValuePair<string, object>>();
            foreach (string Flag in Flags)
            {
                // NOTE(review): StartsWith("a") captures EVERY flag beginning with the
                // letter 'a' (not just the "a <alternate>" token), which makes the
                // switch's case "a" below unreachable — confirm this shadowing is intended.
                if (Flag.StartsWith("a"))
                {
                    Attributes.Add(
                        new KeyValuePair<string, object>(
                            "a",
                            Flag
                        )
                    );
                }
                else
                {
                    // token before the first space selects the attribute kind; the
                    // remainder of the flag becomes its value
                    string[] FlagCompare = Flag.Split(' ');
                    switch (FlagCompare[0].Trim().ToLower())
                    {
                        case "cr":
                        // cracked
                        case "f":
                        // fixed
                        case "h":
                        // hacked
                        case "m":
                        // modified
                        case "p":
                        // pirated
                        case "t":
                        // trained
                        case "tr":
                        // translated
                        case "o":
                        // overdump
                        case "u":
                        // underdump
                        case "v":
                        // virus
                        case "b":
                        // bad dump
                        case "a":
                        // alternate
                        case "!":
                            // known verified dump
                            // -------------------
                            // drop the kind token and keep the rest of the flag as the value
                            string shavedToken = Flag.Substring(FlagCompare[0].Trim().Length).Trim();
                            Attributes.Add(new KeyValuePair<string, object>(
                                FlagCompare[0].Trim().ToLower(),
                                shavedToken
                            ));
                            break;
                    }
                }
            }

            // serialise the collected attribute pairs; an empty list becomes "[]"
            string AttributesJson;
            if (Attributes.Count > 0)
            {
                AttributesJson = Newtonsoft.Json.JsonConvert.SerializeObject(Attributes);
            }
            else
            {
                AttributesJson = "[]";
            }

            string updateSQL = "UPDATE Signatures_Roms SET Attributes=@attributes, IngestorVersion=2 WHERE Id=@id";
            dbDict = new Dictionary<string, object>();
            AddOrSet(dbDict, "attributes", AttributesJson);
            // NOTE(review): unboxes Id as int — will throw if the column is BIGINT; confirm schema.
            AddOrSet(dbDict, "id", (int)row["Id"]);
            db.ExecuteCMD(updateSQL, dbDict);

            // log progress every 100 rows, and once more on the final row
            Counter += 1;
            if ((Counter - LastCounterCheck) >= 100 || Counter == data.Rows.Count)
            {
                LastCounterCheck = Counter;
                Logging.LogKey(Logging.LogType.Information, "process.signature_ingest",
                    "database.update_updating_database_entries_progress",
                    null, new[] { Counter.ToString(), data.Rows.Count.ToString() });
            }
        }
    }
}
|
||||
|
||||
/// <summary>
/// Schema 1024 migration: re-runs the import pipeline for every ROM row still at
/// RomDataVersion 1, using the view_Games_Roms helper view. If the view has not been
/// created the migration is skipped and a message is logged.
/// </summary>
public static async Task MySql_1024_MigrateMetadataVersion()
{
    FileSignature signatureScanner = new FileSignature();

    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);

    // bail out early when the helper view is absent
    string sql = "SELECT table_name FROM information_schema.views WHERE table_schema = @dbname AND table_name = 'view_Games_Roms';";
    Dictionary<string, object> dbDict = new Dictionary<string, object>
    {
        { "dbname", Config.DatabaseConfiguration.DatabaseName }
    };
    DataTable viewLookup = await db.ExecuteCMDAsync(sql, dbDict);
    if (viewLookup.Rows.Count == 0)
    {
        Logging.LogKey(Logging.LogType.Information, "process.database", "database.view_does_not_exist_skipping_migration", null, new[] { "view_Games_Roms" });
        return;
    }

    sql = "SELECT * FROM view_Games_Roms WHERE RomDataVersion = 1;";
    DataTable romRows = await db.ExecuteCMDAsync(sql);
    int total = romRows.Rows.Count;

    for (int index = 0; index < total; index++)
    {
        DataRow row = romRows.Rows[index];

        Logging.LogKey(Logging.LogType.Information, "process.database", "database.migration_updating_rom_table_for_rom", null, new[] { (index + 1).ToString(), total.ToString(), (string)row["Name"] });

        GameLibrary.LibraryItem library = await GameLibrary.GetLibrary((int)row["LibraryId"]);

        // rebuild the hash pair recorded for this ROM
        HashObject hash = new HashObject()
        {
            md5hash = (string)row["MD5"],
            sha1hash = (string)row["SHA1"]
        };

        FileSignature.FileHash fileHash = new FileSignature.FileHash()
        {
            Library = library,
            Hash = hash,
            FileName = (string)row["RelativePath"]
        };

        // only the signature portion of the tuple is needed here
        var (_, signature) = await signatureScanner.GetFileSignatureAsync(
            library,
            fileHash
        );

        gaseous_server.Classes.Plugins.MetadataProviders.MetadataTypes.Platform platform = await Platforms.GetPlatform((long)row["PlatformId"]);

        await ImportGame.StoreGame(library, hash, signature, platform, (string)row["Path"], (long)row["Id"]);
    }
}
|
||||
|
||||
/// <summary>
/// Schema 1031 background migration hook: locates the queued background database
/// upgrade task and attaches the metadata refresh and migration sub-tasks to it,
/// in dependency order.
/// </summary>
public static async Task MySql_1031_MigrateMetadataVersion()
{
    foreach (var queueItem in ProcessQueue.QueueProcessor.QueueItems)
    {
        // only the background database upgrade task receives the sub-tasks
        if (queueItem.ItemType != ProcessQueue.QueueItemType.BackgroundDatabaseUpgrade)
        {
            continue;
        }

        await queueItem.AddSubTask(ProcessQueue.QueueItemSubTasks.MetadataRefresh_Platform, "Platform Metadata", null, false);
        await queueItem.AddSubTask(ProcessQueue.QueueItemSubTasks.MetadataRefresh_Signatures, "Signature Metadata", null, false);
        await queueItem.AddSubTask(ProcessQueue.QueueItemSubTasks.MetadataRefresh_Game, "Game Metadata", null, false);
        await queueItem.AddSubTask(ProcessQueue.QueueItemSubTasks.DatabaseMigration_1031, "Database Migration 1031", null, false);
    }
}
|
||||
|
||||
/// <summary>
/// Schema 1031 data migration: rewrites each user's favourites from legacy game ids
/// to metadata map ids (via MetadataMapBridge), then points every ROM media group at
/// its game's MetadataMapId.
/// </summary>
public static async Task RunMigration1031()
{
    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);

    // migrate favourites
    string sql = "SELECT * FROM Users;";
    DataTable userRows = db.ExecuteCMD(sql);
    foreach (DataRow userRow in userRows.Rows)
    {
        // fetch this user's current favourites
        sql = "SELECT * FROM Favourites WHERE UserId = @userid;";
        Dictionary<string, object> queryArgs = new Dictionary<string, object>
        {
            { "userid", userRow["Id"] }
        };
        DataTable favouriteRows = db.ExecuteCMD(sql, queryArgs);

        // snapshot the legacy game ids before the table is cleared
        List<long> legacyGameIds = new List<long>();
        foreach (DataRow favouriteRow in favouriteRows.Rows)
        {
            legacyGameIds.Add((long)favouriteRow["GameId"]);
        }

        // clear the old-format favourites for this user
        sql = "DELETE FROM Favourites WHERE UserId = @userid;";
        queryArgs = new Dictionary<string, object>
        {
            { "userid", userRow["Id"] }
        };
        db.ExecuteNonQuery(sql, queryArgs);

        // re-add each favourite keyed on its metadata map id
        foreach (long legacyGameId in legacyGameIds)
        {
            sql = "SELECT DISTINCT ParentMapId FROM MetadataMapBridge WHERE MetadataSourceType = 1 AND MetadataSourceId = @gameid;";
            queryArgs = new Dictionary<string, object>
            {
                { "gameid", legacyGameId }
            };
            DataTable mapRows = db.ExecuteCMD(sql, queryArgs);
            if (mapRows.Rows.Count > 0)
            {
                Favourites favouriteStore = new Favourites();
                favouriteStore.SetFavourite((string)userRow["Id"], (long)mapRows.Rows[0]["ParentMapId"], true);
            }
        }
    }

    // migrate media groups
    sql = "SELECT DISTINCT RomMediaGroup.Id, Games_Roms.MetadataMapId FROM RomMediaGroup_Members JOIN RomMediaGroup ON RomMediaGroup_Members.GroupId = RomMediaGroup.Id JOIN Games_Roms ON RomMediaGroup_Members.RomId = Games_Roms.Id;";
    DataTable groupRows = db.ExecuteCMD(sql);
    foreach (DataRow groupRow in groupRows.Rows)
    {
        // point the media group at the game's metadata map id
        sql = "UPDATE RomMediaGroup SET GameId = @gameid WHERE Id = @id;";
        Dictionary<string, object> updateArgs = new Dictionary<string, object>
        {
            { "gameid", groupRow["MetadataMapId"] },
            { "id", groupRow["Id"] }
        };
        db.ExecuteNonQuery(sql, updateArgs);
    }
}
|
||||
|
||||
/// <summary>
/// Builds (or upgrades in place) the Metadata_* tables used from schema version 1031,
/// one table per IGDB model type.
/// </summary>
public static class TableBuilder_1031
{
    /// <summary>
    /// Creates or updates the Metadata_* table for every IGDB model type used by the server.
    /// </summary>
    public static void BuildTables_1031()
    {
        BuildTableFromType(typeof(IGDB.Models.AgeRating));
        BuildTableFromType(typeof(IGDB.Models.AgeRatingCategory));
        BuildTableFromType(typeof(IGDB.Models.AgeRatingContentDescriptionV2));
        BuildTableFromType(typeof(IGDB.Models.AgeRatingOrganization));
        BuildTableFromType(typeof(IGDB.Models.AlternativeName));
        BuildTableFromType(typeof(IGDB.Models.Artwork));
        BuildTableFromType(typeof(IGDB.Models.Character));
        BuildTableFromType(typeof(IGDB.Models.CharacterGender));
        BuildTableFromType(typeof(IGDB.Models.CharacterMugShot));
        BuildTableFromType(typeof(IGDB.Models.CharacterSpecies));
        BuildTableFromType(typeof(IGDB.Models.Collection));
        BuildTableFromType(typeof(IGDB.Models.CollectionMembership));
        BuildTableFromType(typeof(IGDB.Models.CollectionMembershipType));
        BuildTableFromType(typeof(IGDB.Models.CollectionRelation));
        BuildTableFromType(typeof(IGDB.Models.CollectionRelationType));
        BuildTableFromType(typeof(IGDB.Models.CollectionType));
        BuildTableFromType(typeof(IGDB.Models.Company));
        BuildTableFromType(typeof(IGDB.Models.CompanyLogo));
        BuildTableFromType(typeof(IGDB.Models.CompanyStatus));
        BuildTableFromType(typeof(IGDB.Models.CompanyWebsite));
        BuildTableFromType(typeof(IGDB.Models.Cover));
        BuildTableFromType(typeof(IGDB.Models.Event));
        BuildTableFromType(typeof(IGDB.Models.EventLogo));
        BuildTableFromType(typeof(IGDB.Models.EventNetwork));
        BuildTableFromType(typeof(IGDB.Models.ExternalGame));
        BuildTableFromType(typeof(IGDB.Models.ExternalGameSource));
        BuildTableFromType(typeof(IGDB.Models.Franchise));
        BuildTableFromType(typeof(IGDB.Models.Game));
        BuildTableFromType(typeof(IGDB.Models.GameEngine));
        BuildTableFromType(typeof(IGDB.Models.GameEngineLogo));
        BuildTableFromType(typeof(IGDB.Models.GameLocalization));
        BuildTableFromType(typeof(IGDB.Models.GameMode));
        BuildTableFromType(typeof(IGDB.Models.GameReleaseFormat));
        BuildTableFromType(typeof(IGDB.Models.GameStatus));
        BuildTableFromType(typeof(IGDB.Models.GameTimeToBeat));
        BuildTableFromType(typeof(IGDB.Models.GameType));
        BuildTableFromType(typeof(IGDB.Models.GameVersion));
        BuildTableFromType(typeof(IGDB.Models.GameVersionFeature));
        BuildTableFromType(typeof(IGDB.Models.GameVersionFeatureValue));
        BuildTableFromType(typeof(IGDB.Models.GameVideo));
        BuildTableFromType(typeof(IGDB.Models.Genre));
        BuildTableFromType(typeof(IGDB.Models.InvolvedCompany));
        BuildTableFromType(typeof(IGDB.Models.Keyword));
        BuildTableFromType(typeof(IGDB.Models.Language));
        BuildTableFromType(typeof(IGDB.Models.LanguageSupport));
        BuildTableFromType(typeof(IGDB.Models.LanguageSupportType));
        BuildTableFromType(typeof(IGDB.Models.MultiplayerMode));
        BuildTableFromType(typeof(IGDB.Models.NetworkType));
        BuildTableFromType(typeof(IGDB.Models.Platform));
        BuildTableFromType(typeof(IGDB.Models.PlatformFamily));
        BuildTableFromType(typeof(IGDB.Models.PlatformLogo));
        BuildTableFromType(typeof(IGDB.Models.PlatformVersion));
        BuildTableFromType(typeof(IGDB.Models.PlatformVersionCompany));
        BuildTableFromType(typeof(IGDB.Models.PlatformVersionReleaseDate));
        BuildTableFromType(typeof(IGDB.Models.PlatformWebsite));
        BuildTableFromType(typeof(IGDB.Models.PlayerPerspective));
        BuildTableFromType(typeof(IGDB.Models.PopularityPrimitive));
        BuildTableFromType(typeof(IGDB.Models.PopularityType));
        BuildTableFromType(typeof(IGDB.Models.Region));
        BuildTableFromType(typeof(IGDB.Models.ReleaseDate));
        BuildTableFromType(typeof(IGDB.Models.ReleaseDateRegion));
        BuildTableFromType(typeof(IGDB.Models.ReleaseDateStatus));
        BuildTableFromType(typeof(IGDB.Models.Screenshot));
        BuildTableFromType(typeof(IGDB.Models.Theme));
        BuildTableFromType(typeof(IGDB.Models.Website));
        BuildTableFromType(typeof(IGDB.Models.WebsiteType));
    }

    /// <summary>
    /// Builds a table from a type definition, or modifies an existing table.
    /// This is used to create or update tables in the database based on the properties of a class.
    /// Updates are limited to adding new columns (and widening mismatched column types),
    /// as the table structure should not otherwise change once created.
    /// If the table already exists, it will only add new columns that are not already present.
    /// This is useful for maintaining a consistent schema across different versions of the application.
    /// The method can be used with any type that has properties that can be mapped to database columns.
    /// The method does not return any value, but it will throw an exception if there is an error during the table creation or modification process.
    /// </summary>
    /// <param name="type">The type definition of the class for which the table should be built.</param>
    public static void BuildTableFromType(Type type)
    {
        // Get the table name from the class name
        string tableName = type.Name;

        // Start building the SQL command
        Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);

        // Use migration journal to track whether the rename step for this table has run.
        // This replaces the old Config.ReadSetting("RenameMigration_{tableName}") approach,
        // making the tracking schema-based rather than settings-based.
        string renameStepName = $"RenameToMetadata_{tableName}";
        if (!MigrationJournal.AlreadySucceeded(1031, MigrationJournal.StepType.PreUpgrade, renameStepName))
        {
            // rename the table if it exists
            // Check if the table exists via information_schema (portable, no IF EXISTS needed)
            string checkTableExistsQuery = $"SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = '{tableName}'";
            var tableLookup = db.ExecuteCMD(checkTableExistsQuery);
            if (Convert.ToInt32(tableLookup.Rows[0][0]) > 0)
            {
                // The table exists, so we will rename it
                Logging.LogKey(Logging.LogType.Information, "process.database",
                    "database.renaming_table_to_metadata_prefix",
                    null, new[] { tableName, $"Metadata_{tableName}" });

                string renameTableQuery = $"ALTER TABLE `{tableName}` RENAME TO `Metadata_{tableName}`";
                db.ExecuteNonQuery(renameTableQuery);
            }

            // Record success in the journal so this step is skipped on any subsequent run
            long jId = MigrationJournal.Start(1031, MigrationJournal.StepType.PreUpgrade, renameStepName);
            MigrationJournal.Complete(jId);
        }
        // Update the table name to include the Metadata prefix
        tableName = $"Metadata_{tableName}";

        // Get the properties of the class
        PropertyInfo[] properties = type.GetProperties();

        // Create the table with the basic structure if it does not exist
        string createTableQuery = $"CREATE TABLE IF NOT EXISTS `{tableName}` (`Id` BIGINT PRIMARY KEY, `dateAdded` DATETIME DEFAULT CURRENT_TIMESTAMP, `lastUpdated` DATETIME DEFAULT CURRENT_TIMESTAMP )";
        db.ExecuteNonQuery(createTableQuery);

        // Add the SourceId column if it does not exist.
        // Fixed: the previous `ADD COLUMN IF NOT EXISTS` syntax is MariaDB-only and is
        // rejected by stock MySQL, so existence is now checked explicitly first.
        if (!ColumnExists(db, tableName, "SourceId"))
        {
            string addSourceIdQuery = $"ALTER TABLE `{tableName}` ADD COLUMN `SourceId` INT";
            db.ExecuteNonQuery(addSourceIdQuery);
        }

        // Loop through each property to add it as a column in the table
        foreach (PropertyInfo property in properties)
        {
            // Get the property name and map its CLR type to a SQL column type
            string columnName = property.Name;
            string columnType = MapColumnType(property.PropertyType);

            // check if there is a column with the name of the property.
            // NOTE: LIKE treats '_' and '%' as wildcards; IGDB model property names
            // contain neither, so the pattern behaves as an exact match here.
            string checkColumnQuery = $"SHOW COLUMNS FROM `{tableName}` LIKE '{columnName}'";
            var columnLookup = db.ExecuteCMD(checkColumnQuery);
            if (columnLookup.Rows.Count > 0)
            {
                // Column already exists, check if the type family matches.
                // TEXT/LONGTEXT columns are left alone as they can hold any mapped value.
                string existingType = columnLookup.Rows[0]["Type"].ToString();
                if (existingType.ToLower().Split("(")[0] != columnType.ToLower().Split("(")[0] && existingType != "text" && existingType != "longtext")
                {
                    // Type mismatch: modify the column to expected type
                    Logging.LogKey(Logging.LogType.Information, "process.database",
                        "database.modifying_column_type",
                        null, new[] { columnName, tableName, existingType, columnType });
                    string alterColumnQuery = $"ALTER TABLE `{tableName}` MODIFY COLUMN `{columnName}` {columnType}";
                    try
                    {
                        db.ExecuteNonQuery(alterColumnQuery);
                    }
                    catch (Exception ex)
                    {
                        // best-effort: a failed type change is logged but does not abort the build
                        Logging.LogKey(Logging.LogType.Warning, "process.database",
                            "database.modify_column_type_failed",
                            null, new[] { columnName, tableName, ex.Message }, ex);
                    }
                    continue;
                }
                continue; // Skip this column as it already exists with the correct type
            }

            // Add the column to the table.
            // Existence was checked above, so a plain ADD COLUMN is safe and portable
            // (MySQL does not support ADD COLUMN IF NOT EXISTS).
            Logging.LogKey(Logging.LogType.Information, "process.database",
                "database.adding_column_to_table",
                null, new[] { columnName, columnType, tableName });
            string addColumnQuery = $"ALTER TABLE `{tableName}` ADD COLUMN `{columnName}` {columnType}";
            db.ExecuteNonQuery(addColumnQuery);
        }
    }

    /// <summary>
    /// Returns true when the named column exists on the given table.
    /// </summary>
    private static bool ColumnExists(Database db, string tableName, string columnName)
    {
        // only called with trusted, code-derived table/column names
        string checkColumnQuery = $"SHOW COLUMNS FROM `{tableName}` LIKE '{columnName}'";
        return db.ExecuteCMD(checkColumnQuery).Rows.Count > 0;
    }

    /// <summary>
    /// Maps a CLR property type (unwrapping Nullable&lt;T&gt;) to the SQL column type used
    /// for the metadata tables. Unknown types default to VARCHAR(255).
    /// </summary>
    private static string MapColumnType(Type propertyType)
    {
        // If the property is nullable, resolve the underlying type name
        string propertyTypeName = propertyType.Name;
        if (propertyTypeName == "Nullable`1")
        {
            propertyTypeName = propertyType.GetGenericArguments()[0].Name;
        }

        switch (propertyTypeName)
        {
            case "String":
                return "VARCHAR(255)";
            case "Int32":
                return "INT";
            case "Int64":
                return "BIGINT";
            case "Boolean":
                return "BOOLEAN";
            case "DateTime":
            case "DateTimeOffset":
                return "DATETIME";
            case "Double":
                return "DOUBLE";
            case "IdentityOrValue`1":
                // IGDB identity wrapper stores the referenced row id
                return "BIGINT";
            case "IdentitiesOrValues`1":
                // IGDB identity-list wrapper is stored as serialised text
                return "LONGTEXT";
            default:
                return "VARCHAR(255)";
        }
    }
}
|
||||
}
|
||||
}
|
||||
399
gaseous-lib/Classes/Database/DatabaseMigrationManifest.cs
Normal file
399
gaseous-lib/Classes/Database/DatabaseMigrationManifest.cs
Normal file
|
|
@ -0,0 +1,399 @@
|
|||
using System;
|
||||
using System.Data;
|
||||
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
/// <summary>
|
||||
/// Declares the expected database structural state for a given schema version.
|
||||
/// After a migration step completes, the engine runs the checks registered for
|
||||
/// that version, logs any failures, and terminates if they are critical.
|
||||
///
|
||||
/// HOW TO ADD VALIDATION FOR A NEW MIGRATION
|
||||
/// ------------------------------------------
|
||||
/// 1. Add a new entry to the _manifest list in BuildManifest(), using the helper
|
||||
/// methods (RequireTable, RequireColumn, RequireIndex, RequireView).
|
||||
/// 2. The CI check (check-migration-scripts.yml) will fail the PR if the latest
|
||||
/// SQL migration version has no manifest entry, keeping validation coverage
|
||||
/// always up to date.
|
||||
/// </summary>
|
||||
public static class DatabaseMigrationManifest
|
||||
{
|
||||
/// <summary>
|
||||
/// Describes a column that should exist as part of a validation check.
|
||||
/// </summary>
|
||||
public class ColumnSpec
{
    /// <summary>
    /// Gets the column name that must exist. Defaults to empty; callers are expected
    /// to set it via the object initializer.
    /// </summary>
    public string Name { get; init; } = "";
    /// <summary>
    /// Expected SQL type family (e.g. "varchar", "int", "bigint", "datetime").
    /// Compared case-insensitively against the prefix before any "(" in the
    /// actual column type, so "varchar(255)" matches "varchar".
    /// Leave null to skip type checking.
    /// </summary>
    public string? TypeFamily { get; init; }
}
|
||||
|
||||
/// <summary>
|
||||
/// Defines one structural validation check for a specific schema version.
|
||||
/// </summary>
|
||||
public class ValidationEntry
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets the schema version this check belongs to.
|
||||
/// </summary>
|
||||
public int SchemaVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the human-readable name of the check.
|
||||
/// </summary>
|
||||
public string CheckName { get; init; } = "";
|
||||
|
||||
/// <summary>
|
||||
/// Gets whether a failed check should block startup.
|
||||
/// </summary>
|
||||
public bool IsCritical { get; init; } = true;
|
||||
|
||||
/// <summary>The name of the table or view to check.</summary>
|
||||
public string? Table { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the view name to validate when the check targets a view.
|
||||
/// </summary>
|
||||
public string? View { get; init; }
|
||||
|
||||
/// <summary>If set, the named column must exist in Table.</summary>
|
||||
public ColumnSpec? Column { get; init; }
|
||||
|
||||
/// <summary>If set, the named index must exist on Table.</summary>
|
||||
public string? Index { get; init; }
|
||||
}
|
||||
|
||||
private static readonly List<ValidationEntry> _manifest = BuildManifest();
|
||||
|
||||
/// <summary>
|
||||
/// Builds the in-memory validation manifest keyed by schema version.
|
||||
/// </summary>
|
||||
private static List<ValidationEntry> BuildManifest()
|
||||
{
|
||||
// -----------------------------------------------------------------------
|
||||
// Each block corresponds to one migration SQL file version.
|
||||
// Add new blocks here when you add a new gaseous-NNNN.sql file.
|
||||
// Mark IsCritical = false for checks that are advisory only.
|
||||
// -----------------------------------------------------------------------
|
||||
return new List<ValidationEntry>
|
||||
{
|
||||
// --- 1004: GameLibraries table ---
|
||||
new() { SchemaVersion = 1004, CheckName = "GameLibraries table exists",
|
||||
Table = "GameLibraries" },
|
||||
new() { SchemaVersion = 1004, CheckName = "GameLibraries.DefaultLibrary exists",
|
||||
Table = "GameLibraries", Column = new() { Name = "DefaultLibrary", TypeFamily = "int" } },
|
||||
|
||||
// --- 1016: Settings table has ValueType column ---
|
||||
new() { SchemaVersion = 1016, CheckName = "Settings.ValueType exists",
|
||||
Table = "Settings", Column = new() { Name = "ValueType" } },
|
||||
|
||||
// --- 1023: Country and Language lookup tables ---
|
||||
new() { SchemaVersion = 1023, CheckName = "Country table exists", Table = "Country" },
|
||||
new() { SchemaVersion = 1023, CheckName = "Language table exists", Table = "Language" },
|
||||
|
||||
// --- 1027: UserProfiles table ---
|
||||
new() { SchemaVersion = 1027, CheckName = "UserProfiles table exists", Table = "UserProfiles" },
|
||||
new() { SchemaVersion = 1027, CheckName = "UserProfiles.UserId exists",
|
||||
Table = "UserProfiles", Column = new() { Name = "UserId" } },
|
||||
|
||||
// --- 1031: Core Metadata_ tables ---
|
||||
new() { SchemaVersion = 1031, CheckName = "Metadata_Game table exists", Table = "Metadata_Game" },
|
||||
new() { SchemaVersion = 1031, CheckName = "Metadata_Platform table exists", Table = "Metadata_Platform" },
|
||||
|
||||
// --- 1035: Relation_Game_ tables and indexes ---
|
||||
new() { SchemaVersion = 1035, CheckName = "Relation_Game_Genres exists",
|
||||
Table = "Relation_Game_Genres" },
|
||||
new() { SchemaVersion = 1035, CheckName = "idx_Relation_Genres_composite exists",
|
||||
Table = "Relation_Game_Genres", Index = "idx_Relation_Genres_composite" },
|
||||
|
||||
// --- 1036: Metadata_Game.MetadataSource column ---
|
||||
new() { SchemaVersion = 1036, CheckName = "Metadata_Game.MetadataSource exists",
|
||||
Table = "Metadata_Game", Column = new() { Name = "MetadataSource", TypeFamily = "int" } },
|
||||
new() { SchemaVersion = 1036, CheckName = "Metadata_AgeRatingContentDescription table exists",
|
||||
Table = "Metadata_AgeRatingContentDescription" },
|
||||
|
||||
// --- 1037: Metadata_GameVideo.VideoId updated ---
|
||||
new() { SchemaVersion = 1037, CheckName = "Metadata_GameVideo.VideoId exists",
|
||||
Table = "Metadata_GameVideo", Column = new() { Name = "VideoId", TypeFamily = "varchar" } },
|
||||
|
||||
// --- 1038: MetadataMap.SignatureGameNameThe column ---
|
||||
new() { SchemaVersion = 1038, CheckName = "MetadataMap.SignatureGameNameThe exists",
|
||||
Table = "MetadataMap", Column = new() { Name = "SignatureGameNameThe", TypeFamily = "varchar" } },
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns all manifest entries for schema versions > <paramref name="fromVersion"/>
|
||||
/// and <= <paramref name="toVersion"/>. Used after a batch of migrations to validate
|
||||
/// everything that was applied.
|
||||
/// </summary>
|
||||
public static IEnumerable<ValidationEntry> GetEntriesForRange(int fromVersion, int toVersion)
|
||||
=> _manifest.Where(e => e.SchemaVersion > fromVersion && e.SchemaVersion <= toVersion);
|
||||
|
||||
/// <summary>
|
||||
/// Returns all manifest entries for exactly <paramref name="version"/>.
|
||||
/// </summary>
|
||||
public static IEnumerable<ValidationEntry> GetEntriesForVersion(int version)
|
||||
=> _manifest.Where(e => e.SchemaVersion == version);
|
||||
|
||||
/// <summary>
|
||||
/// Returns the highest schema version that has at least one manifest entry.
|
||||
/// Used by the CI consistency check to verify that the manifest covers the
|
||||
/// latest migration SQL file.
|
||||
/// </summary>
|
||||
public static int MaxManifestVersion
|
||||
=> _manifest.Count > 0 ? _manifest.Max(e => e.SchemaVersion) : 0;
|
||||
}
|
||||
|
||||
/// <summary>
/// Executes validation entries against the live database and returns a result list.
/// </summary>
public static class DatabaseMigrationValidator
{
    /// <summary>
    /// Represents the outcome of running a single manifest validation check.
    /// </summary>
    public class ValidationResult
    {
        /// <summary>
        /// Gets the display name of the validation check.
        /// </summary>
        public string CheckName { get; init; } = "";

        /// <summary>
        /// Gets the schema version associated with the check.
        /// </summary>
        public int SchemaVersion { get; init; }

        /// <summary>
        /// Gets whether the check passed.
        /// </summary>
        public bool Passed { get; init; }

        /// <summary>
        /// Gets whether a failure should block startup.
        /// </summary>
        public bool IsCritical { get; init; }

        /// <summary>
        /// Gets the failure reason when the check does not pass.
        /// </summary>
        public string? FailureReason { get; init; }
    }

    /// <summary>
    /// Runs all manifest checks for the given schema version range.
    /// Logs each result. Returns false if any critical check failed.
    /// </summary>
    public static bool ValidateRange(int fromVersion, int toVersion)
    {
        var entries = DatabaseMigrationManifest.GetEntriesForRange(fromVersion, toVersion);
        return RunChecks(entries);
    }

    /// <summary>
    /// Runs all manifest checks for a single schema version.
    /// Called from <see cref="Database.InitDB"/> after each migration step.
    /// </summary>
    public static bool ValidateVersion(int version)
    {
        var entries = DatabaseMigrationManifest.GetEntriesForVersion(version);
        return RunChecks(entries);
    }

    /// <summary>
    /// Executes a set of validation entries, logs each outcome, and records it in
    /// the migration journal. Returns false when any critical check failed;
    /// advisory (non-critical) failures only produce a warning.
    /// </summary>
    private static bool RunChecks(IEnumerable<DatabaseMigrationManifest.ValidationEntry> entries)
    {
        bool allPassed = true;
        Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
        string dbName = Config.DatabaseConfiguration.DatabaseName;

        foreach (var entry in entries)
        {
            ValidationResult result = Check(db, dbName, entry);

            if (result.Passed)
            {
                Logging.LogKey(Logging.LogType.Information, "process.database",
                    "database.validation_check_passed",
                    null, new[] { result.SchemaVersion.ToString(), result.CheckName });
            }
            else
            {
                // Critical failures are logged at Critical severity and flip the
                // overall result; advisory checks are logged as warnings only.
                var logLevel = result.IsCritical
                    ? Logging.LogType.Critical
                    : Logging.LogType.Warning;

                Logging.LogKey(logLevel, "process.database",
                    "database.validation_check_failed",
                    null, new[] { result.SchemaVersion.ToString(), result.CheckName, result.FailureReason ?? "" });

                if (result.IsCritical) allPassed = false;
            }

            // Write a journal entry for each check so the outcome is auditable.
            long jId = MigrationJournal.Start(
                entry.SchemaVersion,
                MigrationJournal.StepType.Validation,
                entry.CheckName);
            if (result.Passed)
                MigrationJournal.Complete(jId);
            else
                MigrationJournal.Fail(jId, result.FailureReason);
        }

        return allPassed;
    }

    /// <summary>
    /// Builds a <see cref="ValidationResult"/> for <paramref name="entry"/>.
    /// The failure reason is attached only when the check did not pass.
    /// </summary>
    private static ValidationResult Result(DatabaseMigrationManifest.ValidationEntry entry,
        bool passed, string? failureReason = null)
    {
        return new ValidationResult
        {
            CheckName = entry.CheckName,
            SchemaVersion = entry.SchemaVersion,
            IsCritical = entry.IsCritical,
            Passed = passed,
            FailureReason = passed ? null : failureReason
        };
    }

    /// <summary>
    /// Runs a COUNT(*) query and returns true when the count is greater than zero.
    /// </summary>
    private static bool CountIsPositive(Database db, string sql, Dictionary<string, object> parameters)
    {
        DataTable r = db.ExecuteCMD(sql, parameters);
        return Convert.ToInt32(r.Rows[0][0]) > 0;
    }

    /// <summary>
    /// Evaluates a single validation entry against the live database.
    /// Any exception is converted into a failed result rather than propagating,
    /// so one broken check cannot abort the whole validation pass.
    /// </summary>
    private static ValidationResult Check(Database db, string dbName,
        DatabaseMigrationManifest.ValidationEntry entry)
    {
        try
        {
            // --- Table check (only when no more specific target is set) ---
            if (entry.Table != null && entry.Column == null && entry.Index == null && entry.View == null)
            {
                bool exists = CountIsPositive(db,
                    "SELECT COUNT(*) FROM information_schema.tables " +
                    "WHERE table_schema = @db AND table_name = @tbl",
                    new Dictionary<string, object> { { "db", dbName }, { "tbl", entry.Table } });
                return Result(entry, exists, $"Table '{entry.Table}' not found in database '{dbName}'");
            }

            // --- View check ---
            if (entry.View != null)
            {
                bool exists = CountIsPositive(db,
                    "SELECT COUNT(*) FROM information_schema.views " +
                    "WHERE table_schema = @db AND table_name = @vw",
                    new Dictionary<string, object> { { "db", dbName }, { "vw", entry.View } });
                return Result(entry, exists, $"View '{entry.View}' not found");
            }

            // --- Column check (existence plus optional type-family match) ---
            if (entry.Table != null && entry.Column != null)
            {
                string sql = "SELECT COLUMN_TYPE FROM information_schema.COLUMNS " +
                    "WHERE TABLE_SCHEMA = @db AND TABLE_NAME = @tbl AND COLUMN_NAME = @col";
                var p = new Dictionary<string, object>
                {
                    { "db", dbName },
                    { "tbl", entry.Table },
                    { "col", entry.Column.Name }
                };
                DataTable r = db.ExecuteCMD(sql, p);
                if (r.Rows.Count == 0)
                {
                    return Result(entry, false, $"Column '{entry.Column.Name}' not found in table '{entry.Table}'");
                }

                if (entry.Column.TypeFamily != null)
                {
                    // Compare only the type family: "varchar(255)" matches "varchar".
                    string actualType = r.Rows[0]["COLUMN_TYPE"].ToString() ?? "";
                    string actualFamily = actualType.Split('(')[0].Trim().ToLowerInvariant();
                    string expectedFamily = entry.Column.TypeFamily.ToLowerInvariant();
                    if (actualFamily != expectedFamily)
                    {
                        return Result(entry, false, $"Column '{entry.Column.Name}' in '{entry.Table}' has type '{actualType}', expected family '{expectedFamily}'");
                    }
                }

                return Result(entry, true);
            }

            // --- Index check ---
            if (entry.Table != null && entry.Index != null)
            {
                bool exists = CountIsPositive(db,
                    "SELECT COUNT(*) FROM information_schema.STATISTICS " +
                    "WHERE TABLE_SCHEMA = @db AND TABLE_NAME = @tbl AND INDEX_NAME = @idx",
                    new Dictionary<string, object>
                    {
                        { "db", dbName },
                        { "tbl", entry.Table },
                        { "idx", entry.Index }
                    });
                return Result(entry, exists, $"Index '{entry.Index}' not found on table '{entry.Table}'");
            }

            // The entry specified no recognisable combination of targets.
            return Result(entry, false, "Unrecognised check specification");
        }
        catch (Exception ex)
        {
            return Result(entry, false, $"Exception during check: {ex.Message}");
        }
    }
}
|
||||
}
|
||||
535
gaseous-lib/Classes/Database/MemoryCache.cs
Normal file
535
gaseous-lib/Classes/Database/MemoryCache.cs
Normal file
|
|
@ -0,0 +1,535 @@
|
|||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Threading;
|
||||
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
/// <summary>
/// Provides an in-memory, thread-safe cache with simple expiration for objects.
/// An optional maximum size enables an LRU eviction policy; optional cloning
/// provides snapshot semantics on set and/or get.
/// All timestamps use Environment.TickCount64 (milliseconds since system start).
/// </summary>
public class MemoryCache
{
    // Stats counters (Int64 so Interlocked can update them atomically)
    private long _hits = 0;
    private long _misses = 0;
    private long _evictions = 0; // total evictions (expired + size)
    private long _expirationEvictions = 0; // evictions due to expiration
    private long _sizeEvictions = 0; // evictions due to size limit
    private DateTime _lastResetUtc = DateTime.UtcNow;

    // LRU list management (only maintained when _maxSize > 0)
    private readonly object _lruLock = new();
    private MemoryCacheItem? _lruHead = null; // most recently used
    private MemoryCacheItem? _lruTail = null; // least recently used
    private readonly int _maxSize = 0; // 0 = unlimited
    private long _lastStatsLogTick = Environment.TickCount64;
    // Optional snapshotting support
    private readonly bool _cloneOnSet = false;
    private readonly bool _cloneOnGet = false;
    private readonly Func<object, object>? _cloner = null; // custom clone delegate

    /// <summary>
    /// Represents a cached item with its associated object and expiration details.
    /// </summary>
    private sealed class MemoryCacheItem
    {
        /// <summary>
        /// Initializes a new instance of the MemoryCacheItem class.
        /// </summary>
        /// <param name="key">The cache key.</param>
        /// <param name="CacheObject">The object instance to be cached.</param>
        /// <param name="ExpirationSeconds">The number of seconds before the cached object expires.</param>
        public MemoryCacheItem(string key, object CacheObject, int ExpirationSeconds)
        {
            Key = key;
            cacheObject = CacheObject;
            SetExpirationSeconds(ExpirationSeconds);
        }

        /// <summary>
        /// The Environment.TickCount64 value (milliseconds) when the item was added.
        /// </summary>
        public long addedTime { get; } = Environment.TickCount64;

        /// <summary>
        /// The Environment.TickCount64 value (milliseconds) at which the item expires.
        /// </summary>
        public long expirationTime
        {
            get
            {
                return addedTime + _expirationMilliseconds;
            }
        }

        /// <summary>
        /// The number of seconds the object will be cached.
        /// </summary>
        public int GetExpirationSeconds() => (int)(_expirationMilliseconds / 1000L);

        // BUG FIX: the lifetime was previously stored as TimeSpan ticks (100 ns units)
        // and added directly to Environment.TickCount64, which counts milliseconds.
        // That unit mismatch made every entry live ~10,000x longer than requested
        // (a 2 s expiry became ~231 days). The lifetime is now stored in
        // milliseconds so both operands of expirationTime share the same unit.
        public void SetExpirationSeconds(int seconds) => _expirationMilliseconds = (long)TimeSpan.FromSeconds(seconds).TotalMilliseconds;
        private long _expirationMilliseconds = 2000; // default 2 seconds

        /// <summary>
        /// The object to be cached.
        /// </summary>
        public object cacheObject { get; set; }

        // LRU tracking (all links guarded by MemoryCache._lruLock)
        public MemoryCacheItem? Prev { get; set; }
        public MemoryCacheItem? Next { get; set; }
        public long lastAccessed { get; set; } = Environment.TickCount64;
        public string Key { get; }
    }

    // Thread-safe dictionary that mimics the Dictionary Add/Remove API used by
    // existing calling code.
    private sealed class ConcurrentMemoryCache : ConcurrentDictionary<string, MemoryCacheItem>
    {
        // Mimic Dictionary.Add, but overwrite instead of throwing when the key
        // already exists so current code paths never see an exception here.
        public void Add(string key, MemoryCacheItem value)
        {
            if (!TryAdd(key, value))
            {
                this[key] = value;
            }
        }

        public void Remove(string key)
        {
            TryRemove(key, out _);
        }

        public static implicit operator Dictionary<string, MemoryCacheItem>(ConcurrentMemoryCache source)
            => new Dictionary<string, MemoryCacheItem>(source);
    }

    private readonly ConcurrentMemoryCache memoryCache;
    // Timer kept alive by this field reference; it is not accessed elsewhere.
    private readonly Timer _timer;
    private const string LogCategory = "Cache";

    /// <summary>
    /// Initializes a new MemoryCache with unlimited size (time-based expiration only).
    /// </summary>
    public MemoryCache() : this(0) { }

    /// <summary>
    /// Initializes a new MemoryCache with an optional maximum size. When maxSize > 0 an LRU policy is applied.
    /// </summary>
    /// <param name="maxSize">Maximum number of items allowed (0 = unlimited).</param>
    public MemoryCache(int maxSize)
    {
        _maxSize = maxSize < 0 ? 0 : maxSize;
        memoryCache = new ConcurrentMemoryCache();
        // One-second sweep for expired entries; the same callback also emits stats.
        _timer = new Timer(CacheTimerCallback, null, TimeSpan.Zero, TimeSpan.FromSeconds(1));
    }

    /// <summary>
    /// Initializes a new MemoryCache with size limit and optional cloning behavior.
    /// </summary>
    /// <param name="maxSize">Maximum number of items (0 = unlimited).</param>
    /// <param name="cloneOnSet">If true a snapshot copy of the value is created when inserting into cache.</param>
    /// <param name="cloneOnGet">If true a fresh copy of the cached value is returned on every Get.</param>
    /// <param name="cloner">Custom clone delegate. If null and cloning enabled, a JSON deep clone is attempted.</param>
    public MemoryCache(int maxSize, bool cloneOnSet, bool cloneOnGet, Func<object, object>? cloner = null) : this(maxSize)
    {
        _cloneOnSet = cloneOnSet;
        _cloneOnGet = cloneOnGet;
        _cloner = cloner ?? (_cloneOnSet || _cloneOnGet ? DefaultDeepClone : null);
    }

    private static object DefaultDeepClone(object source)
    {
        // Fallback deep clone via System.Text.Json (handles simple POCO graphs).
        // Streams / non-serializable members will be defaulted.
        try
        {
            var type = source.GetType();
            var json = System.Text.Json.JsonSerializer.Serialize(source, type);
            // If deserialization yields null, return the original to avoid null.
            return System.Text.Json.JsonSerializer.Deserialize(json, type) ?? source;
        }
        catch
        {
            return source; // fail open – better to return original than throw inside cache path
        }
    }

    private void CacheTimerCallback(object? state)
    {
        ClearExpiredCache();
        // Log stats every 5 minutes using the same timer thread (1s interval)
        try
        {
            long now = Environment.TickCount64;
            if (now - _lastStatsLogTick >= TimeSpan.FromMinutes(5).TotalMilliseconds)
            {
                _lastStatsLogTick = now;
                var stats = GetStats();
                Logging.LogKey(Logging.LogType.Information, "process.cache", "cache.stats", null, new[] { stats.ItemCount.ToString(), (stats.MaxSize == 0 ? "inf" : stats.MaxSize.ToString()), stats.Hits.ToString(), stats.Misses.ToString(), stats.HitRate.ToString("P2"), stats.Evictions.ToString(), stats.ExpirationEvictions.ToString(), stats.SizeEvictions.ToString(), stats.Requests.ToString() });
            }
        }
        catch (Exception ex)
        {
            Logging.LogKey(Logging.LogType.Debug, "process.cache", "cache.error_logging_cache_statistics", null, null, ex);
        }
    }

    /// <summary>
    /// Retrieves a cached object by key if it exists and has not expired; otherwise returns null (also cleans up expired entry).
    /// </summary>
    /// <param name="CacheKey">The unique cache key.</param>
    /// <returns>The cached object instance, or null if missing or expired.</returns>
    public object? GetCacheObject(string CacheKey)
    {
        try
        {
            if (memoryCache.TryGetValue(CacheKey, out var cacheItem))
            {
                if (cacheItem.expirationTime < Environment.TickCount64)
                {
                    // Expired – treat as miss & eviction
                    memoryCache.Remove(CacheKey);
                    Interlocked.Increment(ref _misses);
                    Interlocked.Increment(ref _evictions);
                    Interlocked.Increment(ref _expirationEvictions);
                    // Remove from LRU list if size tracking enabled
                    if (_maxSize > 0)
                    {
                        lock (_lruLock)
                        {
                            RemoveNode(cacheItem);
                        }
                    }
                    return null;
                }
                // Hit
                Interlocked.Increment(ref _hits);
                if (_maxSize > 0)
                {
                    lock (_lruLock)
                    {
                        cacheItem.lastAccessed = Environment.TickCount64;
                        MoveToHead(cacheItem);
                    }
                }
                var value = cacheItem.cacheObject;
                if (_cloneOnGet && _cloner != null && value != null)
                {
                    value = _cloner(value);
                }
                return value;
            }
            // Miss (no key)
            Interlocked.Increment(ref _misses);
            return null;
        }
        catch
        {
            // On error consider it a miss
            Interlocked.Increment(ref _misses);
            return null;
        }
    }

    /// <summary>
    /// Adds or replaces an object in the in-memory cache with an optional expiration time in seconds.
    /// </summary>
    /// <param name="CacheKey">The unique key used to identify the cached object.</param>
    /// <param name="CacheObject">The object instance to cache.</param>
    /// <param name="ExpirationSeconds">How many seconds the object should remain cached (default is 2 seconds).</param>
    /// <remarks>
    /// If an existing item with the same key is present it is removed before adding the new one; on failure the cache is cleared and the error is logged.
    /// </remarks>
    public void SetCacheObject(string CacheKey, object CacheObject, int ExpirationSeconds = 2)
    {
        try
        {
            MemoryCacheItem? existing = null;
            if (memoryCache.TryGetValue(CacheKey, out existing))
            {
                // Replace existing
                memoryCache.Remove(CacheKey);
                if (_maxSize > 0 && existing != null)
                {
                    lock (_lruLock)
                    {
                        RemoveNode(existing);
                    }
                }
            }

            if (CacheObject == null)
            {
                return; // do not store null entries; treat as no-op
            }
            if (_cloneOnSet && _cloner != null)
            {
                CacheObject = _cloner(CacheObject);
            }
            var newItem = new MemoryCacheItem(CacheKey, CacheObject, ExpirationSeconds);
            memoryCache.Add(CacheKey, newItem);
            if (_maxSize > 0)
            {
                lock (_lruLock)
                {
                    AddToHead(newItem);
                    EnforceSizeLimit();
                }
            }
        }
        catch (Exception ex)
        {
            Logging.LogKey(Logging.LogType.Debug, "process.cache", "cache.error_setting_cache_object", null, null, ex);
            ClearCache();
        }
    }

    /// <summary>
    /// Removes a cached object by key if it exists.
    /// </summary>
    /// <param name="CacheKey">The unique cache key.</param>
    public void RemoveCacheObject(string CacheKey)
    {
        if (memoryCache.TryGetValue(CacheKey, out var existing))
        {
            memoryCache.Remove(CacheKey);
            if (_maxSize > 0 && existing != null)
            {
                lock (_lruLock)
                {
                    RemoveNode(existing);
                }
            }
        }
    }

    /// <summary>
    /// Removes multiple cached objects by their keys if they exist.
    /// </summary>
    /// <param name="CacheKeys">A list of unique cache keys.</param>
    public void RemoveCacheObject(List<string> CacheKeys)
    {
        foreach (string key in CacheKeys)
        {
            if (memoryCache.TryGetValue(key, out var existing))
            {
                memoryCache.Remove(key);
                if (_maxSize > 0 && existing != null)
                {
                    lock (_lruLock)
                    {
                        RemoveNode(existing);
                    }
                }
            }
        }
    }

    /// <summary>
    /// Clears all items from the in-memory cache.
    /// </summary>
    public void ClearCache()
    {
        memoryCache.Clear();
        if (_maxSize > 0)
        {
            lock (_lruLock)
            {
                _lruHead = null;
                _lruTail = null;
            }
        }
    }

    // Sweeps the cache for expired entries; invoked once per second by the timer.
    private void ClearExpiredCache()
    {
        try
        {
            long currTime = Environment.TickCount64;

            // Collect first, then remove, so we never mutate while enumerating.
            List<string> toRemove = new();
            foreach (var kvp in memoryCache)
            {
                var item = kvp.Value;
                if (item.expirationTime < currTime)
                {
                    toRemove.Add(kvp.Key);
                }
            }
            foreach (var key in toRemove)
            {
                if (memoryCache.TryGetValue(key, out var item))
                {
                    // BUG FIX: removed a leftover debug Console.WriteLine that emitted
                    // an unterminated ANSI color escape on every purge.
                    memoryCache.Remove(key);
                    Interlocked.Increment(ref _evictions);
                    Interlocked.Increment(ref _expirationEvictions);
                    if (_maxSize > 0)
                    {
                        lock (_lruLock)
                        {
                            RemoveNode(item);
                        }
                    }
                }
            }
        }
        catch (Exception ex)
        {
            Logging.LogKey(Logging.LogType.Debug, "process.cache", "cache.error_clearing_expired_cache", null, null, ex);
        }
    }

    /// <summary>
    /// Returns current cache statistics (snapshot values).
    /// </summary>
    public MemoryCacheStats GetStats()
    {
        long hits = Interlocked.Read(ref _hits);
        long misses = Interlocked.Read(ref _misses);
        long evictions = Interlocked.Read(ref _evictions);
        long expirationEvictions = Interlocked.Read(ref _expirationEvictions);
        long sizeEvictions = Interlocked.Read(ref _sizeEvictions);
        return new MemoryCacheStats
        {
            Hits = hits,
            Misses = misses,
            Evictions = evictions,
            ExpirationEvictions = expirationEvictions,
            SizeEvictions = sizeEvictions,
            ItemCount = memoryCache.Count,
            MaxSize = _maxSize,
            LastResetUtc = _lastResetUtc
        };
    }

    /// <summary>
    /// Resets cache statistics counters to zero.
    /// </summary>
    public void ResetStats()
    {
        Interlocked.Exchange(ref _hits, 0);
        Interlocked.Exchange(ref _misses, 0);
        Interlocked.Exchange(ref _evictions, 0);
        Interlocked.Exchange(ref _expirationEvictions, 0);
        Interlocked.Exchange(ref _sizeEvictions, 0);
        _lastResetUtc = DateTime.UtcNow;
    }

    // LRU helper methods (callers must hold _lruLock)
    private void AddToHead(MemoryCacheItem item)
    {
        item.Prev = null;
        item.Next = _lruHead;
        if (_lruHead != null) _lruHead.Prev = item;
        _lruHead = item;
        if (_lruTail == null) _lruTail = item; // first item
    }

    private void MoveToHead(MemoryCacheItem item)
    {
        if (_lruHead == item) return;
        RemoveNode(item);
        AddToHead(item);
    }

    private void RemoveNode(MemoryCacheItem item)
    {
        var prev = item.Prev;
        var next = item.Next;
        if (prev != null) prev.Next = next; else if (_lruHead == item) _lruHead = next;
        if (next != null) next.Prev = prev; else if (_lruTail == item) _lruTail = prev;
        item.Prev = null;
        item.Next = null;
    }

    // Evicts least-recently-used items until the cache fits within _maxSize.
    // Callers must hold _lruLock.
    private void EnforceSizeLimit()
    {
        if (_maxSize <= 0) return;
        while (memoryCache.Count > _maxSize && _lruTail != null)
        {
            var toEvict = _lruTail;
            RemoveNode(toEvict);
            memoryCache.Remove(toEvict.Key);
            Interlocked.Increment(ref _evictions);
            Interlocked.Increment(ref _sizeEvictions);
        }
    }
}
|
||||
|
||||
/// <summary>
/// Immutable snapshot of memory cache statistics at a point in time.
/// </summary>
public class MemoryCacheStats
{
    /// <summary>Total number of successful cache lookups.</summary>
    public long Hits { get; init; }

    /// <summary>Total number of failed cache lookups (missing or expired).</summary>
    public long Misses { get; init; }

    /// <summary>Total number of items removed because they expired.</summary>
    public long Evictions { get; init; }

    /// <summary>Number of evictions caused by expiration.</summary>
    public long ExpirationEvictions { get; init; }

    /// <summary>Number of evictions caused by size limit (LRU policy).</summary>
    public long SizeEvictions { get; init; }

    /// <summary>Current number of items stored in the cache.</summary>
    public int ItemCount { get; init; }

    /// <summary>Configured maximum size (0 = unlimited).</summary>
    public int MaxSize { get; init; }

    /// <summary>UTC timestamp when statistics were last reset.</summary>
    public DateTime LastResetUtc { get; init; }

    /// <summary>Total number of Get attempts (Hits + Misses).</summary>
    public long Requests => Hits + Misses;

    /// <summary>Fraction of requests that were hits (0 if no requests).</summary>
    public double HitRate
    {
        get
        {
            long total = Requests;
            return total == 0 ? 0 : Hits / (double)total;
        }
    }

    /// <summary>Fraction of requests that were misses (0 if no requests).</summary>
    public double MissRate
    {
        get
        {
            long total = Requests;
            return total == 0 ? 0 : Misses / (double)total;
        }
    }
}
|
||||
|
||||
/// <summary>
/// Options controlling how a database query result is cached in memory.
/// </summary>
public class DatabaseMemoryCacheOptions
{
    /// <summary>
    /// Creates a new options instance.
    /// </summary>
    /// <param name="CacheEnabled">True to enable caching of the query result.</param>
    /// <param name="ExpirationSeconds">Lifetime of a cached item, in seconds.</param>
    /// <param name="CacheKey">Optional key identifying the cached object; may be null.</param>
    public DatabaseMemoryCacheOptions(bool CacheEnabled = false, int ExpirationSeconds = 1, string? CacheKey = null)
    {
        this.CacheEnabled = CacheEnabled;
        this.ExpirationSeconds = ExpirationSeconds;
        this.CacheKey = CacheKey;
    }

    /// <summary>Whether caching is enabled.</summary>
    public bool CacheEnabled { get; set; }

    /// <summary>Number of seconds before a cached item expires.</summary>
    public int ExpirationSeconds { get; set; }

    /// <summary>Unique key used to identify the cached object; null when unset.</summary>
    public string? CacheKey { get; set; } = null;
}
|
||||
}
|
||||
178
gaseous-lib/Classes/Database/MigrationJournal.cs
Normal file
178
gaseous-lib/Classes/Database/MigrationJournal.cs
Normal file
|
|
@ -0,0 +1,178 @@
|
|||
using System;
|
||||
using System.Data;
|
||||
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
/// <summary>
/// Manages the migration_journal table, which records the lifecycle of every
/// migration step (pre-upgrade code, SQL script, post-upgrade code, validation).
/// This replaces settings-based tracking (e.g. RenameMigration_*) with a single,
/// schema-verifiable source of truth.
/// </summary>
public static class MigrationJournal
{
    /// <summary>
    /// Identifies the category of migration work recorded in the journal.
    /// </summary>
    public enum StepType
    {
        /// <summary>Blocking code that runs before a schema script.</summary>
        PreUpgrade,
        /// <summary>The versioned SQL script for a schema step.</summary>
        SqlScript,
        /// <summary>Code that runs after a schema script completes.</summary>
        PostUpgrade,
        /// <summary>Post-step structural validation checks.</summary>
        Validation,
        /// <summary>Deferred migration work executed in the background.</summary>
        BackgroundTask
    }

    /// <summary>
    /// Represents the lifecycle state of a journal entry.
    /// </summary>
    public enum StepStatus
    {
        /// <summary>The step has started and is still in progress.</summary>
        Started,
        /// <summary>The step completed successfully.</summary>
        Succeeded,
        /// <summary>The step ended with an error.</summary>
        Failed,
        /// <summary>The step was intentionally skipped.</summary>
        Skipped
    }

    /// <summary>
    /// Ensures the migration_journal table exists. Called at the very start of
    /// InitDB, before any migration steps run. Existence is checked explicitly
    /// against information_schema.tables for the configured database, and the
    /// table is only created when that check reports no match.
    /// (FIX: previous doc claimed a try/catch around a bare CREATE TABLE, which
    /// did not match the implementation.)
    /// </summary>
    public static void EnsureTable()
    {
        Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);

        // Check existence via information_schema rather than relying on IF NOT EXISTS
        string checkSql = "SELECT COUNT(*) FROM information_schema.tables " +
            "WHERE table_schema = @dbname AND table_name = 'migration_journal'";
        var dbDict = new Dictionary<string, object> { { "dbname", Config.DatabaseConfiguration.DatabaseName } };
        DataTable result = db.ExecuteCMD(checkSql, dbDict);

        if (Convert.ToInt32(result.Rows[0][0]) == 0)
        {
            string createSql = @"
                CREATE TABLE `migration_journal` (
                    `Id` BIGINT NOT NULL AUTO_INCREMENT,
                    `SchemaVersion` INT NOT NULL,
                    `StepType` VARCHAR(32) NOT NULL,
                    `StepName` VARCHAR(256) NOT NULL,
                    `Status` VARCHAR(32) NOT NULL,
                    `StartedAt` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    `CompletedAt` DATETIME NULL,
                    `ErrorMessage` TEXT NULL,
                    PRIMARY KEY (`Id`),
                    INDEX `idx_journal_version_step` (`SchemaVersion`, `StepType`)
                )";
            db.ExecuteNonQuery(createSql);
            Logging.LogKey(Logging.LogType.Information, "process.database", "database.migration_journal_table_created");
        }
    }

    /// <summary>
    /// Records the start of a migration step and returns the journal row Id so the
    /// caller can later call Complete() or Fail() with the same Id.
    /// </summary>
    /// <param name="schemaVersion">Schema version the step belongs to.</param>
    /// <param name="stepType">Category of migration work being started.</param>
    /// <param name="stepName">Human-readable step identifier.</param>
    /// <returns>The Id of the inserted journal row.</returns>
    public static long Start(int schemaVersion, StepType stepType, string stepName)
    {
        Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
        string sql = @"
            INSERT INTO migration_journal (SchemaVersion, StepType, StepName, Status, StartedAt)
            VALUES (@ver, @type, @name, @status, UTC_TIMESTAMP());
            SELECT CAST(LAST_INSERT_ID() AS SIGNED);";
        var dbDict = new Dictionary<string, object>
        {
            { "ver", schemaVersion },
            { "type", stepType.ToString() },
            { "name", stepName },
            { "status", StepStatus.Started.ToString() }
        };
        DataTable data = db.ExecuteCMD(sql, dbDict);
        return Convert.ToInt64(data.Rows[0][0]);
    }

    /// <summary>Marks an in-progress journal entry as succeeded.</summary>
    /// <param name="journalId">Row Id returned by <see cref="Start"/>.</param>
    public static void Complete(long journalId)
    {
        SetFinal(journalId, StepStatus.Succeeded, null);
    }

    /// <summary>Marks an in-progress journal entry as failed, storing the error message.</summary>
    /// <param name="journalId">Row Id returned by <see cref="Start"/>.</param>
    /// <param name="errorMessage">Description of the failure, persisted to ErrorMessage.</param>
    public static void Fail(long journalId, string errorMessage)
    {
        SetFinal(journalId, StepStatus.Failed, errorMessage);
    }

    /// <summary>Marks an in-progress journal entry as skipped (e.g. step no longer applicable).</summary>
    /// <param name="journalId">Row Id returned by <see cref="Start"/>.</param>
    /// <param name="reason">Optional reason, persisted to ErrorMessage.</param>
    public static void Skip(long journalId, string? reason = null)
    {
        SetFinal(journalId, StepStatus.Skipped, reason);
    }

    /// <summary>
    /// Writes the terminal status, completion timestamp, and optional error text
    /// for an existing journal row.
    /// </summary>
    private static void SetFinal(long journalId, StepStatus status, string? errorMessage)
    {
        Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
        string sql = @"
            UPDATE migration_journal
            SET Status = @status, CompletedAt = UTC_TIMESTAMP(), ErrorMessage = @err
            WHERE Id = @id";
        var dbDict = new Dictionary<string, object>
        {
            { "status", status.ToString() },
            { "err", (object?)errorMessage ?? DBNull.Value },
            { "id", journalId }
        };
        db.ExecuteNonQuery(sql, dbDict);
    }

    /// <summary>
    /// Returns true when a step for the given version and type already completed
    /// successfully. Used to make steps idempotent: check before running, skip if
    /// already succeeded. This replaces Config.ReadSetting("RenameMigration_*").
    /// </summary>
    /// <param name="schemaVersion">Schema version the step belongs to.</param>
    /// <param name="stepType">Category of migration work.</param>
    /// <param name="stepName">Step identifier as passed to <see cref="Start"/>.</param>
    public static bool AlreadySucceeded(int schemaVersion, StepType stepType, string stepName)
    {
        Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
        string sql = @"
            SELECT COUNT(*) FROM migration_journal
            WHERE SchemaVersion = @ver
                AND StepType = @type
                AND StepName = @name
                AND Status = @status";
        var dbDict = new Dictionary<string, object>
        {
            { "ver", schemaVersion },
            { "type", stepType.ToString() },
            { "name", stepName },
            { "status", StepStatus.Succeeded.ToString() }
        };
        DataTable result = db.ExecuteCMD(sql, dbDict);
        return Convert.ToInt32(result.Rows[0][0]) > 0;
    }

    /// <summary>
    /// Returns the most recent journal entries for display in CLI status output.
    /// </summary>
    /// <param name="limit">Maximum number of rows to return (newest first).</param>
    public static DataTable GetRecentEntries(int limit = 50)
    {
        Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
        string sql = @"
            SELECT SchemaVersion, StepType, StepName, Status, StartedAt, CompletedAt, ErrorMessage
            FROM migration_journal
            ORDER BY Id DESC
            LIMIT @lim";
        var dbDict = new Dictionary<string, object> { { "lim", limit } };
        return db.ExecuteCMD(sql, dbDict);
    }
}
|
||||
}
|
||||
459
gaseous-lib/Classes/Filters.cs
Normal file
459
gaseous-lib/Classes/Filters.cs
Normal file
|
|
@ -0,0 +1,459 @@
|
|||
using System.Data;
|
||||
using System.Reflection.Metadata.Ecma335;
|
||||
using System.Threading.Tasks;
|
||||
using gaseous_server.Classes.Metadata;
|
||||
using IGDB.Models;
|
||||
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
/// <summary>
/// Generates the filter lists (platforms, genres, game modes, player
/// perspectives, themes, age groupings) used to narrow the game library view.
/// All queries honour the caller's maximum age restriction.
/// </summary>
public class Filters
{
    /// <summary>Filter categories that can be requested from <see cref="GetFilter"/>.</summary>
    public enum FilterType
    {
        Platforms,
        Genres,
        GameModes,
        PlayerPerspectives,
        Themes,
        AgeGroupings
    }

    /// <summary>
    /// Returns the filter items for a single category.
    /// </summary>
    /// <param name="filterType">The category of filter to build.</param>
    /// <param name="MaximumAgeRestriction">Highest age group the caller is permitted to see.</param>
    /// <param name="IncludeUnrated">True to also count games with no age classification.</param>
    /// <returns>Filter items with per-item game counts; a single "Invalid Filter Type" item for unknown categories.</returns>
    public static async Task<List<FilterItem>> GetFilter(FilterType filterType, Metadata.AgeGroups.AgeRestrictionGroupings MaximumAgeRestriction, bool IncludeUnrated)
    {
        Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
        string sql = string.Empty;

        List<FilterItem> returnList = new List<FilterItem>();

        // age restriction clauses - only the integer value of the enum is
        // embedded, so no untrusted text reaches the SQL string
        string ageRestriction_Platform = "g.AgeGroupId <= " + (int)MaximumAgeRestriction;
        if (IncludeUnrated == true)
        {
            ageRestriction_Platform += " OR g.AgeGroupId IS NULL";
        }

        switch (filterType)
        {
            case FilterType.Platforms:
                // Optimized query: use CTE with view_MetadataMap
                sql = @"
                    WITH FilteredGames AS (
                        SELECT DISTINCT
                            g.Id,
                            g.SourceId AS GameIdType
                        FROM Metadata_Game g
                        WHERE (" + ageRestriction_Platform + @")
                    ),
                    GamesWithRoms AS (
                        SELECT DISTINCT
                            fg.Id,
                            vmm.PlatformId
                        FROM FilteredGames fg
                        INNER JOIN view_MetadataMap vmm
                            ON vmm.MetadataSourceId = fg.Id
                            AND vmm.MetadataSourceType = fg.GameIdType
                        WHERE vmm.RomCount > 0
                    )
                    SELECT
                        p.Id,
                        p.Name,
                        COUNT(DISTINCT gwr.Id) AS GameCount
                    FROM GamesWithRoms gwr
                    INNER JOIN Metadata_Platform p
                        ON gwr.PlatformId = p.Id
                        AND p.SourceId = 0
                    GROUP BY p.Id, p.Name
                    ORDER BY p.Name";

                DataTable dbResponse = await db.ExecuteCMDAsync(sql, new DatabaseMemoryCacheOptions(CacheEnabled: true, ExpirationSeconds: 300));

                foreach (DataRow dr in dbResponse.Rows)
                {
                    FilterItem item = new FilterItem(dr);
                    returnList.Add(item);
                }

                return returnList;

            case FilterType.AgeGroupings:
                // Optimized query: use CTE with view_MetadataMap
                sql = @"
                    WITH FilteredGames AS (
                        SELECT DISTINCT
                            g.Id,
                            g.SourceId AS GameIdType
                        FROM Metadata_Game g
                        WHERE (" + ageRestriction_Platform + @")
                    ),
                    GamesWithRoms AS (
                        SELECT DISTINCT
                            fg.Id,
                            ag.AgeGroupId
                        FROM FilteredGames fg
                        INNER JOIN view_MetadataMap vmm
                            ON vmm.MetadataSourceId = fg.Id
                            AND vmm.MetadataSourceType = fg.GameIdType
                            AND vmm.RomCount > 0
                        LEFT JOIN Metadata_AgeGroup ag ON fg.Id = ag.GameId
                    )
                    SELECT
                        AgeGroupId,
                        COUNT(DISTINCT Id) AS GameCount
                    FROM GamesWithRoms
                    GROUP BY AgeGroupId
                    ORDER BY AgeGroupId DESC";
                dbResponse = await db.ExecuteCMDAsync(sql, new DatabaseMemoryCacheOptions(CacheEnabled: true, ExpirationSeconds: 300));

                foreach (DataRow dr in dbResponse.Rows)
                {
                    FilterItem filterAgeGrouping = new FilterItem();
                    if (dr["AgeGroupId"] == DBNull.Value)
                    {
                        // games with no classification fall into the Unclassified bucket
                        filterAgeGrouping.Id = (int)(long)AgeGroups.AgeRestrictionGroupings.Unclassified;
                        filterAgeGrouping.Name = AgeGroups.AgeRestrictionGroupings.Unclassified.ToString();
                    }
                    else
                    {
                        int ageGroupLong = (int)dr["AgeGroupId"];
                        AgeGroups.AgeRestrictionGroupings ageGroup = (AgeGroups.AgeRestrictionGroupings)ageGroupLong;
                        filterAgeGrouping.Id = ageGroupLong;
                        filterAgeGrouping.Name = ageGroup.ToString();
                    }
                    filterAgeGrouping.GameCount = (int)(long)dr["GameCount"];
                    returnList.Add(filterAgeGrouping);
                }

                return returnList;

            case FilterType.Genres:
                List<FilterItem> genres = await GenerateFilterSet(db, "Genre", ageRestriction_Platform);
                return genres;

            case FilterType.GameModes:
                List<FilterItem> gameModes = await GenerateFilterSet(db, "GameMode", ageRestriction_Platform);
                return gameModes;

            case FilterType.PlayerPerspectives:
                List<FilterItem> playerPerspectives = await GenerateFilterSet(db, "PlayerPerspective", ageRestriction_Platform);
                return playerPerspectives;

            case FilterType.Themes:
                List<FilterItem> themes = await GenerateFilterSet(db, "Theme", ageRestriction_Platform);
                return themes;

            default:
                // invalid filter type
                returnList = new List<FilterItem>();
                FilterItem invalidFilter = new FilterItem();
                invalidFilter.Name = "Invalid Filter Type";
                invalidFilter.GameCount = 0;
                invalidFilter.Id = 0;
                returnList.Add(invalidFilter);

                return returnList;
        }
    }

    /// <summary>
    /// Builds every filter category in one pass. The age-filtered game set is
    /// materialised once into a MEMORY temporary table and reused by each
    /// lightweight per-category query.
    /// </summary>
    /// <param name="MaximumAgeRestriction">Highest age group the caller is permitted to see.</param>
    /// <param name="IncludeUnrated">True to also count games with no age classification.</param>
    /// <returns>Dictionary keyed by category name ("platforms", "genres", "gamemodes", "playerperspectives", "themes", "agegroupings").</returns>
    public static async Task<Dictionary<string, List<FilterItem>>> Filter(Metadata.AgeGroups.AgeRestrictionGroupings MaximumAgeRestriction, bool IncludeUnrated)
    {
        Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);

        Dictionary<string, List<FilterItem>> FilterSet = new Dictionary<string, List<FilterItem>>();

        // Build age restriction clause once (only the enum's integer value is embedded)
        string ageRestriction_Platform = "g.AgeGroupId <= " + (int)MaximumAgeRestriction;
        if (IncludeUnrated == true)
        {
            ageRestriction_Platform += " OR g.AgeGroupId IS NULL";
        }

        // OPTIMIZED: Compute the filtered game set once using a base query
        // This replaces 6 independent expensive subqueries with 1 base query + 6 lightweight joins
        string baseFilteredGamesQuery = @"
            CREATE TEMPORARY TABLE IF NOT EXISTS temp_FilteredGamesWithRoms (
                GameId BIGINT,
                GameIdType INT,
                PlatformId BIGINT,
                AgeGroupId INT,
                INDEX idx_gameid_type (GameId, GameIdType),
                INDEX idx_platformid (PlatformId),
                INDEX idx_agegroupid (AgeGroupId)
            ) ENGINE=MEMORY;

            TRUNCATE TABLE temp_FilteredGamesWithRoms;

            INSERT INTO temp_FilteredGamesWithRoms
            SELECT DISTINCT
                g.Id AS GameId,
                g.SourceId AS GameIdType,
                vmm.PlatformId,
                g.AgeGroupId
            FROM Metadata_Game g
            INNER JOIN view_MetadataMap vmm
                ON vmm.MetadataSourceId = g.Id
                AND vmm.MetadataSourceType = g.SourceId
            WHERE (" + ageRestriction_Platform + @")
                AND vmm.RomCount > 0";

        await db.ExecuteCMDAsync(baseFilteredGamesQuery, new DatabaseMemoryCacheOptions(CacheEnabled: false));

        // BUGFIX: the temporary table was previously only dropped on the success
        // path; a failing category query would leave it behind on the pooled
        // connection. try/finally guarantees cleanup either way.
        try
        {
            // platforms
            List<FilterItem> platforms = new List<FilterItem>();
            string sql = @"
                SELECT
                    p.Id,
                    p.Name,
                    COUNT(DISTINCT fg.GameId) AS GameCount
                FROM temp_FilteredGamesWithRoms fg
                INNER JOIN Metadata_Platform p
                    ON fg.PlatformId = p.Id
                    AND p.SourceId = 0
                GROUP BY p.Id, p.Name
                ORDER BY p.Name";

            DataTable dbResponse = await db.ExecuteCMDAsync(sql, new DatabaseMemoryCacheOptions(CacheEnabled: false));

            foreach (DataRow dr in dbResponse.Rows)
            {
                FilterItem platformItem = new FilterItem(dr);
                platforms.Add(platformItem);
            }
            FilterSet.Add("platforms", platforms);

            // genres
            List<FilterItem> genres = await GenerateFilterSetFromTemp(db, "Genre");
            FilterSet.Add("genres", genres);

            // game modes
            List<FilterItem> gameModes = await GenerateFilterSetFromTemp(db, "GameMode");
            FilterSet.Add("gamemodes", gameModes);

            // player perspectives
            List<FilterItem> playerPerspectives = await GenerateFilterSetFromTemp(db, "PlayerPerspective");
            FilterSet.Add("playerperspectives", playerPerspectives);

            // themes
            List<FilterItem> themes = await GenerateFilterSetFromTemp(db, "Theme");
            FilterSet.Add("themes", themes);

            // age groups
            List<FilterItem> agegroupings = new List<FilterItem>();
            sql = @"
                SELECT
                    AgeGroupId,
                    COUNT(DISTINCT GameId) AS GameCount
                FROM temp_FilteredGamesWithRoms
                GROUP BY AgeGroupId
                ORDER BY AgeGroupId DESC";
            dbResponse = await db.ExecuteCMDAsync(sql, new DatabaseMemoryCacheOptions(CacheEnabled: false));

            foreach (DataRow dr in dbResponse.Rows)
            {
                FilterItem filterAgeGrouping = new FilterItem();
                if (dr["AgeGroupId"] == DBNull.Value)
                {
                    filterAgeGrouping.Id = (int)(long)AgeGroups.AgeRestrictionGroupings.Unclassified;
                    filterAgeGrouping.Name = AgeGroups.AgeRestrictionGroupings.Unclassified.ToString();
                }
                else
                {
                    int ageGroupLong = (int)dr["AgeGroupId"];
                    AgeGroups.AgeRestrictionGroupings ageGroup = (AgeGroups.AgeRestrictionGroupings)ageGroupLong;
                    filterAgeGrouping.Id = ageGroupLong;
                    filterAgeGrouping.Name = ageGroup.ToString();
                }
                filterAgeGrouping.GameCount = (int)(long)dr["GameCount"];
                agegroupings.Add(filterAgeGrouping);
            }
            FilterSet.Add("agegroupings", agegroupings);

            return FilterSet;
        }
        finally
        {
            // Clean up temp table even when a category query throws
            await db.ExecuteCMDAsync("DROP TEMPORARY TABLE IF EXISTS temp_FilteredGamesWithRoms", new DatabaseMemoryCacheOptions(CacheEnabled: false));
        }
    }

    /// <summary>
    /// Builds a filter list for one metadata relation (Genre, GameMode, ...)
    /// using the pre-computed temp table, merging rows that share a name across
    /// metadata sources into a single item.
    /// </summary>
    private static async Task<List<FilterItem>> GenerateFilterSetFromTemp(Database db, string Name)
    {
        DataTable dbResponse = await GetGenericFilterItemFromTemp(db, Name);
        Dictionary<string, FilterItem> filterDict = new Dictionary<string, FilterItem>(StringComparer.Ordinal);

        foreach (DataRow dr in dbResponse.Rows)
        {
            FilterItem filterItem = new FilterItem(dr);
            if (filterItem?.Name != null)
            {
                if (filterDict.TryGetValue(filterItem.Name, out FilterItem? existingItem))
                {
                    // Merge with existing item
                    if (existingItem?.Ids != null && filterItem.Ids != null)
                    {
                        foreach (var id in filterItem.Ids)
                        {
                            if (!existingItem.Ids.ContainsKey(id.Key))
                            {
                                existingItem.Ids[id.Key] = id.Value;
                                existingItem.GameCount += filterItem.GameCount;
                            }
                        }
                    }
                }
                else
                {
                    filterDict[filterItem.Name] = filterItem;
                }
            }
        }

        return new List<FilterItem>(filterDict.Values);
    }

    /// <summary>
    /// Runs the per-relation aggregation against the pre-computed temp table.
    /// The relation name is substituted into table identifiers; callers only
    /// pass fixed internal names (Genre, GameMode, PlayerPerspective, Theme),
    /// never user input.
    /// </summary>
    private static async Task<DataTable> GetGenericFilterItemFromTemp(Database db, string Name)
    {
        // Lightweight query against the pre-computed temp table
        string sql = @"
            SELECT
                fg.GameIdType,
                item.Id,
                item.Name,
                COUNT(DISTINCT fg.GameId) AS GameCount
            FROM temp_FilteredGamesWithRoms fg
            INNER JOIN Relation_Game_<ITEMNAME>s rel
                ON fg.GameId = rel.GameId
                AND fg.GameIdType = rel.GameSourceId
            INNER JOIN Metadata_<ITEMNAME> item
                ON rel.<ITEMNAME>sId = item.Id
            GROUP BY fg.GameIdType, item.Id, item.Name
            ORDER BY item.Name";
        sql = sql.Replace("<ITEMNAME>", Name);
        DataTable dbResponse = await db.ExecuteCMDAsync(sql, new DatabaseMemoryCacheOptions(CacheEnabled: false));

        return dbResponse;
    }

    /// <summary>
    /// Builds a filter list for one metadata relation using the standalone CTE
    /// query (no temp table), merging rows that share a name across sources.
    /// </summary>
    private static async Task<List<FilterItem>> GenerateFilterSet(Database db, string Name, string AgeRestriction)
    {
        DataTable dbResponse = await GetGenericFilterItem(db, Name, AgeRestriction);
        Dictionary<string, FilterItem> filterDict = new Dictionary<string, FilterItem>(StringComparer.Ordinal);

        foreach (DataRow dr in dbResponse.Rows)
        {
            FilterItem filterItem = new FilterItem(dr);
            if (filterItem?.Name != null)
            {
                if (filterDict.TryGetValue(filterItem.Name, out FilterItem? existingItem))
                {
                    // Merge with existing item
                    if (existingItem?.Ids != null && filterItem.Ids != null)
                    {
                        foreach (var id in filterItem.Ids)
                        {
                            if (!existingItem.Ids.ContainsKey(id.Key))
                            {
                                existingItem.Ids[id.Key] = id.Value;
                                existingItem.GameCount += filterItem.GameCount;
                            }
                        }
                    }
                }
                else
                {
                    filterDict[filterItem.Name] = filterItem;
                }
            }
        }

        return new List<FilterItem>(filterDict.Values);
    }

    /// <summary>
    /// Runs the per-relation aggregation as a standalone cached CTE query.
    /// The relation name is substituted into table identifiers; callers only
    /// pass fixed internal names, never user input.
    /// </summary>
    private static async Task<DataTable> GetGenericFilterItem(Database db, string Name, string AgeRestriction)
    {
        // Optimized query: use CTE with view_MetadataMap
        string sql = @"
            WITH FilteredGames AS (
                SELECT DISTINCT
                    g.Id,
                    g.SourceId AS GameIdType
                FROM Metadata_Game g
                WHERE (" + AgeRestriction + @")
            ),
            GamesWithRoms AS (
                SELECT DISTINCT
                    fg.Id,
                    fg.GameIdType
                FROM FilteredGames fg
                INNER JOIN view_MetadataMap vmm
                    ON vmm.MetadataSourceId = fg.Id
                    AND vmm.MetadataSourceType = fg.GameIdType
                WHERE vmm.RomCount > 0
            )
            SELECT
                gwr.GameIdType,
                item.Id,
                item.Name,
                COUNT(DISTINCT gwr.Id) AS GameCount
            FROM GamesWithRoms gwr
            INNER JOIN Relation_Game_<ITEMNAME>s rel
                ON gwr.Id = rel.GameId
                AND gwr.GameIdType = rel.GameSourceId
            INNER JOIN Metadata_<ITEMNAME> item
                ON rel.<ITEMNAME>sId = item.Id
            GROUP BY gwr.GameIdType, item.Id, item.Name
            ORDER BY item.Name";
        sql = sql.Replace("<ITEMNAME>", Name);
        DataTable dbResponse = await db.ExecuteCMDAsync(sql, new DatabaseMemoryCacheOptions(CacheEnabled: true, ExpirationSeconds: 300));

        return dbResponse;
    }

    /// <summary>
    /// A single entry in a filter list: name, game count, and the item's id(s),
    /// either as a plain Id or as a per-metadata-source id map.
    /// </summary>
    public class FilterItem
    {
        /// <summary>Creates an empty filter item with a blank name.</summary>
        public FilterItem()
        {
            this.Name = string.Empty;
        }

        /// <summary>
        /// Populates the item from a query row. Rows carrying a GameIdType column
        /// store the id in the per-source <see cref="Ids"/> map; otherwise the
        /// plain <see cref="Id"/> is used.
        /// </summary>
        public FilterItem(DataRow dr)
        {
            this.Name = string.Empty;

            if (dr.Table.Columns.Contains("GameIdType"))
            {
                int gameIdTypeIndex = dr.Table.Columns.IndexOf("GameIdType");
                int idIndex = dr.Table.Columns.IndexOf("Id");

                object? gameIdTypeValue = dr[gameIdTypeIndex];
                if (gameIdTypeValue != null && gameIdTypeValue != DBNull.Value)
                {
                    if (int.TryParse(gameIdTypeValue.ToString(), out int sourceIdValue))
                    {
                        HasheousClient.Models.MetadataSources SourceId = (HasheousClient.Models.MetadataSources)sourceIdValue;
                        this.Ids = new Dictionary<HasheousClient.Models.MetadataSources, long>(1)
                        {
                            { SourceId, (long)dr[idIndex] }
                        };
                    }
                }
            }
            else
            {
                this.Id = (long)dr["Id"];
            }

            object? nameValue = dr["Name"];
            this.Name = nameValue?.ToString() ?? string.Empty;
            this.GameCount = (int)(long)dr["GameCount"];
        }

        /// <summary>Plain item id, used when the row has no GameIdType column.</summary>
        public long? Id { get; set; }

        /// <summary>Item id per metadata source, used when the row has a GameIdType column.</summary>
        public Dictionary<HasheousClient.Models.MetadataSources, long>? Ids { get; set; }

        /// <summary>Display name of the filter item.</summary>
        public string Name { get; set; }

        /// <summary>Number of games matching this filter item.</summary>
        public int GameCount { get; set; }
    }
}
|
||||
}
|
||||
284
gaseous-lib/Classes/GameLibrary.cs
Normal file
284
gaseous-lib/Classes/GameLibrary.cs
Normal file
|
|
@ -0,0 +1,284 @@
|
|||
using System;
|
||||
using System.Data;
|
||||
using System.Threading.Tasks;
|
||||
using gaseous_server.Classes;
|
||||
using gaseous_server.Classes.Metadata;
|
||||
using gaseous_server.Models;
|
||||
|
||||
namespace gaseous_server
|
||||
{
|
||||
public static class GameLibrary
|
||||
{
|
||||
// exceptions

/// <summary>Thrown when adding a library whose path is already registered.</summary>
public class PathExists : Exception
{
    public PathExists(string path) : base($"The library path {path} already exists.")
    { }
}

/// <summary>Thrown when the supplied path does not exist on disk.</summary>
public class PathNotFound : Exception
{
    public PathNotFound(string path) : base($"The path {path} does not exist.")
    { }
}

/// <summary>Thrown when a library id has no matching row.</summary>
public class LibraryNotFound : Exception
{
    public LibraryNotFound(int LibraryId) : base($"Library id {LibraryId} does not exist.")
    { }
}

/// <summary>Thrown when a caller attempts to delete the default library.</summary>
public class CannotDeleteDefaultLibrary : Exception
{
    public CannotDeleteDefaultLibrary() : base("Unable to delete the default library.")
    { }
}

/// <summary>Thrown when a library deletion is attempted while a scan is running.</summary>
public class CannotDeleteLibraryWhileScanIsActive : Exception
{
    public CannotDeleteLibraryWhileScanIsActive() : base("Unable to delete library while a library scan is active. Wait for all scans to complete and try again")
    { }
}
|
||||
|
||||
// code
|
||||
/// <summary>
/// Gets the library flagged as default (DefaultLibrary=1), creating its
/// on-disk directory if it does not already exist.
/// </summary>
public static LibraryItem GetDefaultLibrary
{
    get
    {
        var db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
        DataTable result = db.ExecuteCMD("SELECT * FROM GameLibraries WHERE DefaultLibrary=1 LIMIT 1");
        DataRow record = result.Rows[0];
        var defaultLibrary = new LibraryItem(
            (int)record["Id"],
            (string)record["Name"],
            (string)record["Path"],
            (long)record["DefaultPlatform"],
            Convert.ToBoolean((int)record["DefaultLibrary"]));

        // Create the directory unless it exists, or the path is a file carrying
        // the reparse-point attribute (e.g. a symlink).
        bool directoryPresent = Directory.Exists(defaultLibrary.Path);
        bool isReparseFile = File.Exists(defaultLibrary.Path)
            && new FileInfo(defaultLibrary.Path).Attributes.HasFlag(FileAttributes.ReparsePoint);
        if (!directoryPresent && !isReparseFile)
        {
            Directory.CreateDirectory(defaultLibrary.Path);
        }

        return defaultLibrary;
    }
}
|
||||
|
||||
/// <summary>
/// Repoints the default library's path at [LibraryRootDirectory]/Library.
/// </summary>
public static async Task UpdateDefaultLibraryPathAsync()
{
    var db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
    var parameters = new Dictionary<string, object>
    {
        { "path", Path.Combine(Config.LibraryConfiguration.LibraryRootDirectory, "Library") }
    };
    await db.ExecuteCMDAsync("UPDATE GameLibraries SET Path=@path WHERE DefaultLibrary=1;", parameters);
}
|
||||
|
||||
/// <summary>
/// Returns all configured libraries ordered by name. Optionally attaches disk
/// usage information, and ensures the default library's directory exists.
/// </summary>
/// <param name="GetStorageInfo">True to populate each library's PathInfo with disk details.</param>
public static async Task<List<LibraryItem>> GetLibraries(bool GetStorageInfo = false)
{
    var db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
    DataTable data = await db.ExecuteCMDAsync("SELECT * FROM GameLibraries ORDER BY `Name`;");

    var results = new List<LibraryItem>();
    foreach (DataRow row in data.Rows)
    {
        var library = new LibraryItem(
            (int)row["Id"],
            (string)row["Name"],
            (string)row["Path"],
            (long)row["DefaultPlatform"],
            Convert.ToBoolean((int)row["DefaultLibrary"]));

        if (GetStorageInfo)
        {
            library.PathInfo = SystemInfo.GetDisk(library.Path);
        }

        results.Add(library);

        if (library.IsDefaultLibrary)
        {
            // The default library directory must exist on disk, unless the path
            // is a file carrying the reparse-point attribute (e.g. a symlink).
            bool directoryPresent = Directory.Exists(library.Path);
            bool isReparseFile = File.Exists(library.Path)
                && new FileInfo(library.Path).Attributes.HasFlag(FileAttributes.ReparsePoint);
            if (!directoryPresent && !isReparseFile)
            {
                Directory.CreateDirectory(library.Path);
            }
        }
    }

    return results;
}
|
||||
|
||||
/// <summary>
/// Registers a new (non-default) game library.
/// </summary>
/// <param name="Name">Display name of the library.</param>
/// <param name="Path">Directory on disk that holds the library content.</param>
/// <param name="DefaultPlatformId">Platform id assigned to content in this library by default.</param>
/// <returns>The newly created library.</returns>
/// <exception cref="PathExists">The normalized path is already used by another library.</exception>
/// <exception cref="PathNotFound">The path does not exist on disk.</exception>
public static async Task<LibraryItem> AddLibrary(string Name, string Path, long DefaultPlatformId)
{
    string normalizedPath = Common.NormalizePath(Path);

    // reject duplicates of an existing library path
    foreach (LibraryItem existing in await GetLibraries())
    {
        if (Common.NormalizePath(normalizedPath) == Common.NormalizePath(existing.Path))
        {
            // already existing path!
            throw new PathExists(normalizedPath);
        }
    }

    if (!System.IO.Path.Exists(normalizedPath))
    {
        throw new PathNotFound(normalizedPath);
    }

    var db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
    var parameters = new Dictionary<string, object>
    {
        { "name", Name },
        { "path", normalizedPath },
        { "defaultplatform", DefaultPlatformId }
    };
    DataTable inserted = await db.ExecuteCMDAsync(
        "INSERT INTO GameLibraries (Name, Path, DefaultPlatform, DefaultLibrary) VALUES (@name, @path, @defaultplatform, 0); SELECT CAST(LAST_INSERT_ID() AS SIGNED);",
        parameters);

    int newLibraryId = (int)(long)inserted.Rows[0][0];

    Logging.LogKey(Logging.LogType.Information, "process.library_management", "librarymanagement.created_library_at_directory", null, new string[] { Name, normalizedPath });

    return await GetLibrary(newLibraryId);
}
|
||||
|
||||
/// <summary>
/// Updates a library's name and default platform, then returns the refreshed record.
/// </summary>
/// <param name="LibraryId">Id of the library to update.</param>
/// <param name="Name">New display name.</param>
/// <param name="DefaultPlatformId">New default platform id.</param>
public static async Task<LibraryItem> EditLibrary(int LibraryId, string Name, long DefaultPlatformId)
{
    var db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
    var parameters = new Dictionary<string, object>
    {
        { "name", Name },
        { "defaultplatform", DefaultPlatformId },
        { "id", LibraryId }
    };
    await db.ExecuteCMDAsync("UPDATE GameLibraries SET Name=@name, DefaultPlatform=@defaultplatform WHERE Id=@id;", parameters);

    Logging.LogKey(Logging.LogType.Information, "process.library_management", "librarymanagement.updated_library", null, new string[] { Name });

    return await GetLibrary(LibraryId);
}
|
||||
|
||||
/// <summary>
/// Deletes a non-default library and its ROM records, then refreshes ROM counts
/// for the metadata maps that referenced the deleted ROMs.
/// </summary>
/// <param name="LibraryId">Id of the library to delete.</param>
/// <exception cref="CannotDeleteDefaultLibrary">The target is the default library.</exception>
/// <exception cref="CannotDeleteLibraryWhileScanIsActive">A library scan is currently running.</exception>
public static async Task DeleteLibrary(int LibraryId)
{
    LibraryItem library = await GetLibrary(LibraryId);

    if (library.IsDefaultLibrary)
    {
        Logging.LogKey(Logging.LogType.Warning, "process.library_management", "librarymanagement.unable_to_delete_default_library");
        throw new CannotDeleteDefaultLibrary();
    }

    // refuse to delete while any library scan is running
    foreach (ProcessQueue.QueueProcessor.QueueItem queueItem in ProcessQueue.QueueProcessor.QueueItems)
    {
        bool isScanType = queueItem.ItemType == ProcessQueue.QueueItemType.LibraryScan
            || queueItem.ItemType == ProcessQueue.QueueItemType.LibraryScanWorker;
        if (isScanType && queueItem.ItemState == ProcessQueue.QueueProcessor.QueueItemState.Running)
        {
            Logging.LogKey(Logging.LogType.Warning, "process.library_management", "librarymanagement.unable_to_delete_libraries_while_scan_running");
            throw new CannotDeleteLibraryWhileScanIsActive();
        }
    }

    var db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);

    // capture the affected metadata maps before their rows are removed
    DataTable affectedMetadataMaps = await db.ExecuteCMDAsync(
        "SELECT DISTINCT MetadataMapId FROM Games_Roms WHERE LibraryId=@id AND MetadataMapId IS NOT NULL;",
        new Dictionary<string, object>() { { "id", LibraryId } });

    await db.ExecuteCMDAsync(
        "DELETE FROM Games_Roms WHERE LibraryId=@id; DELETE FROM GameLibraries WHERE Id=@id;",
        new Dictionary<string, object>() { { "id", LibraryId } });

    MetadataManagement.UpdateRomCounts(affectedMetadataMaps.AsEnumerable().Where(row => row["MetadataMapId"] != DBNull.Value).Select(row => (long)row["MetadataMapId"]));

    Logging.LogKey(Logging.LogType.Information, "process.library_management", "librarymanagement.deleted_library_at_path", null, new string[] { library.Name, library.Path });
}
|
||||
|
||||
/// <summary>
/// Loads a single library record by its identifier.
/// </summary>
/// <param name="LibraryId">Identifier of the library to fetch.</param>
/// <param name="GetStorageInfo">When true, also populates <c>PathInfo</c> with disk details for the library path.</param>
/// <returns>The matching library.</returns>
/// <exception cref="LibraryNotFound">No library exists with the supplied id.</exception>
public static async Task<LibraryItem> GetLibrary(int LibraryId, bool GetStorageInfo = false)
{
    Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);

    Dictionary<string, object> parameters = new Dictionary<string, object>
    {
        { "id", LibraryId }
    };
    DataTable results = await db.ExecuteCMDAsync("SELECT * FROM GameLibraries WHERE Id=@id", parameters);

    // Guard clause: an empty result set means the id is unknown.
    if (results.Rows.Count == 0)
    {
        throw new LibraryNotFound(LibraryId);
    }

    DataRow record = results.Rows[0];
    LibraryItem library = new LibraryItem(
        (int)record["Id"],
        (string)record["Name"],
        (string)record["Path"],
        (long)record["DefaultPlatform"],
        Convert.ToBoolean((int)record["DefaultLibrary"]));

    if (GetStorageInfo == true)
    {
        library.PathInfo = SystemInfo.GetDisk(library.Path);
    }

    return library;
}
|
||||
|
||||
/// <summary>
/// Queues a scan of the given library by attaching a scan-worker sub-task to the
/// library-scan queue item, and returns the library record.
/// </summary>
/// <param name="LibraryId">Identifier of the library to scan.</param>
/// <returns>The library that was queued for scanning.</returns>
public static async Task<LibraryItem> ScanLibrary(int LibraryId)
{
    // add the library to scan to the queue
    LibraryItem library = await GetLibrary(LibraryId);

    // start the library scan if it's not already running
    foreach (ProcessQueue.QueueProcessor.QueueItem item in ProcessQueue.QueueProcessor.QueueItems)
    {
        // NOTE(review): the sub-task is added when the LibraryScan item is NOT in the
        // Running state — presumably to queue work onto an idle scanner rather than
        // interfere with one mid-scan; confirm this reading against QueueProcessor.
        if (item.ItemType == ProcessQueue.QueueItemType.LibraryScan && item.ItemState != ProcessQueue.QueueProcessor.QueueItemState.Running)
        {
            item.AddSubTask(ProcessQueue.QueueItemSubTasks.LibraryScanWorker, library.Name, library, true);
            // Kick the queue item so the sub-task starts without waiting for its schedule.
            item.ForceExecute();
        }
    }

    return library;
}
|
||||
|
||||
/// <summary>
/// Immutable view of a game library row (id, name, path, default platform, default flag),
/// plus optional storage information.
/// </summary>
public class LibraryItem
{
    /// <summary>
    /// Builds a library item from its database fields. For the default library this
    /// constructor has a side effect: it creates the library directory on disk if it
    /// does not already exist (symlinked paths — reparse points — are left alone).
    /// </summary>
    public LibraryItem(int Id, string Name, string Path, long DefaultPlatformId, bool IsDefaultLibrary)
    {
        _Id = Id;
        _Name = Name;
        _Path = Path;
        _DefaultPlatformId = DefaultPlatformId;
        _IsDefaultLibrary = IsDefaultLibrary;

        if (_IsDefaultLibrary)
        {
            // Skip creation when the path is a file-backed reparse point (e.g. a symlink),
            // which Directory.Exists does not report.
            if (!Directory.Exists(Path) && !(File.Exists(Path) && new FileInfo(Path).Attributes.HasFlag(FileAttributes.ReparsePoint)))
            {
                Directory.CreateDirectory(Path);
            }
        }
    }

    // Backing fields are set only in the constructor; the public surface is read-only.
    int _Id = 0;
    string _Name = "";
    string _Path = "";
    long _DefaultPlatformId = 0;
    bool _IsDefaultLibrary = false;

    /// <summary>Database identifier of the library.</summary>
    public int Id => _Id;
    /// <summary>Display name of the library.</summary>
    public string Name => _Name;
    /// <summary>Filesystem path of the library root.</summary>
    public string Path => _Path;
    /// <summary>Identifier of the library's default platform (0 = none).</summary>
    public long DefaultPlatformId => _DefaultPlatformId;
    /// <summary>
    /// Resolves the default platform's display name, or "" when no default platform is set.
    /// NOTE(review): this blocks on an async call via .Result — a sync-over-async pattern
    /// that can deadlock on a synchronization context and throws AggregateException on
    /// failure; confirm callers only use it from context-free threads.
    /// </summary>
    public string? DefaultPlatformName
    {
        get
        {
            if (_DefaultPlatformId != 0)
            {
                HasheousClient.Models.Metadata.IGDB.Platform platform = Platforms.GetPlatform(_DefaultPlatformId).Result;
                return platform.Name;
            }
            else
            {
                return "";
            }
        }
    }
    /// <summary>True when this is the default library (protected from deletion).</summary>
    public bool IsDefaultLibrary => _IsDefaultLibrary;

    /// <summary>Disk/storage details for <see cref="Path"/>; populated only on request.</summary>
    public SystemInfoModel.PathItem? PathInfo { get; set; }
}
|
||||
}
|
||||
}
|
||||
800
gaseous-lib/Classes/HTTPComms.cs
Normal file
800
gaseous-lib/Classes/HTTPComms.cs
Normal file
|
|
@ -0,0 +1,800 @@
|
|||
namespace gaseous_server.Classes
|
||||
{
|
||||
/// <summary>
|
||||
/// Provides common high level HTTP communications methods such as GET, POST, PUT, DELETE, HEAD, etc. while handling errors, exceptions, logging, retries, timeouts, and other common concerns.
|
||||
/// Each method should accept parameters for expected return type using T, URL, headers, body (if applicable), timeout, and retry policy.
|
||||
/// Each method should return a standardized response object containing status code, headers, body, and any error information.
|
||||
/// It should also support asynchronous operations using async/await patterns.
|
||||
/// Where applicable, it should support JSON serialization/deserialization for request and response bodies.
|
||||
/// It should also include logging hooks to allow integration with logging frameworks for request/response logging.
|
||||
/// </summary>
|
||||
public class HTTPComms
|
||||
{
|
||||
/// <summary>
/// User-Agent value identifying this server build, the .NET runtime version,
/// and the host operating system. Recomputed on each read.
/// </summary>
private string _userAgent =>
    $"GaseousServer/{System.Reflection.Assembly.GetExecutingAssembly().GetName().Version} (.NET {System.Environment.Version}; {System.Runtime.InteropServices.RuntimeInformation.OSDescription})";
|
||||
|
||||
// One HttpClient per HTTPComms instance.
// NOTE(review): creating an HttpClient per instance can exhaust sockets if many
// HTTPComms instances are created; confirm instances are long-lived or shared.
private HttpClient _httpClient = new HttpClient();

// Retry count used when the caller passes retryCount <= 0.
private static int _defaultRetryCount = 3;

// Per-attempt timeout used when the caller does not supply one.
private static TimeSpan _defaultTimeout = TimeSpan.FromSeconds(30);

// JsonSerializerOptions configured to handle property hiding/new keyword scenarios
// NOTE(review): public field with a leading-underscore (private-style) name — callers
// depend on it, so it is kept as-is; consider exposing a PascalCase property instead.
public static readonly System.Text.Json.JsonSerializerOptions _jsonOptions = new System.Text.Json.JsonSerializerOptions
{
    PropertyNameCaseInsensitive = true,
    DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull,
    IncludeFields = false,
    PreferredObjectCreationHandling = System.Text.Json.Serialization.JsonObjectCreationHandling.Populate,
    // This setting tells the serializer to prefer derived class properties over base class properties
    TypeInfoResolver = new System.Text.Json.Serialization.Metadata.DefaultJsonTypeInfoResolver(),
    Converters = { new CaseInsensitiveEnumConverter(), new FlexibleNumberConverter() }
};

// Rate limiting parameters
// time window in seconds
private static int _rateLimitWindow = 60;
// max requests allowed in the time window
private static int _maxRequestsPerWindow = 100;
// Fallback wait (seconds) after an HTTP 429 when no Retry-After header is usable.
private static int _rateLimit429WaitTimeSeconds = 60;
// Wait (seconds) after an HTTP 420 "Enhance Your Calm" response.
private static int _rateLimit420WaitTimeSeconds = 120;

// Track request timestamps for rate limiting per host
private static readonly Dictionary<string, Queue<DateTime>> _hostRequestTimestamps = new Dictionary<string, Queue<DateTime>>();
|
||||
// Optional per-host rate limit overrides: domain -> options
|
||||
/// <summary>
/// Per-host rate limiting configuration: how many requests are allowed inside a
/// sliding time window.
/// </summary>
public class RateLimitOptions
{
    /// <summary>Length of the sliding window, in seconds, over which requests are counted.</summary>
    public int WindowSeconds { get; set; }

    /// <summary>Maximum number of requests permitted within a single window.</summary>
    public int MaxRequests { get; set; }
}
|
||||
// Optional per-host rate limit overrides keyed by host name (e.g. "example.com");
// hosts without an entry fall back to the class-wide window/limit fields.
private static readonly Dictionary<string, RateLimitOptions> _perHostRateLimits = new Dictionary<string, RateLimitOptions>();
// Guards _perHostRateLimits and _hostRequestTimestamps against concurrent requests.
private static readonly object _rateLimitLock = new object();
|
||||
|
||||
/// <summary>
/// Registers (or replaces) the rate limit configuration for a single host.
/// </summary>
/// <param name="host">Domain name (e.g., example.com).</param>
/// <param name="options">Rate limit options for the host.</param>
/// <exception cref="ArgumentException">The host is null, empty, or whitespace.</exception>
/// <exception cref="ArgumentNullException">The options object is null.</exception>
public static void SetRateLimitForHost(string host, RateLimitOptions options)
{
    if (string.IsNullOrWhiteSpace(host))
    {
        throw new ArgumentException("Host is required", nameof(host));
    }
    if (options == null)
    {
        throw new ArgumentNullException(nameof(options));
    }

    // Serialize writes so concurrent senders never observe a torn dictionary.
    lock (_rateLimitLock)
    {
        _perHostRateLimits[host] = options;
    }
}
|
||||
|
||||
/// <summary>
/// Replaces every configured per-host rate limit with the supplied set.
/// </summary>
/// <param name="limits">Dictionary mapping host to rate limit options.</param>
/// <exception cref="ArgumentNullException">The limits dictionary is null.</exception>
public static void SetRateLimits(Dictionary<string, RateLimitOptions> limits)
{
    if (limits == null)
    {
        throw new ArgumentNullException(nameof(limits));
    }

    // Clear-then-copy under the lock so readers never see a partial table.
    lock (_rateLimitLock)
    {
        _perHostRateLimits.Clear();
        foreach (var entry in limits)
        {
            _perHostRateLimits[entry.Key] = entry.Value;
        }
    }
}
|
||||
|
||||
/// <summary>
/// HTTP methods supported by HTTPComms. The enum member name is converted to the
/// wire method via ToString() when building requests.
/// </summary>
public enum HttpMethod
{
    /// <summary>
    /// HTTP GET method.
    /// </summary>
    GET,
    /// <summary>
    /// HTTP POST method.
    /// </summary>
    POST,
    /// <summary>
    /// HTTP PUT method.
    /// </summary>
    PUT,
    /// <summary>
    /// HTTP DELETE method.
    /// </summary>
    DELETE,
    /// <summary>
    /// HTTP HEAD method.
    /// </summary>
    HEAD
}
|
||||
|
||||
/// <summary>
/// Standardized HTTP response wrapper carrying status code, headers, a typed body,
/// and error details when something went wrong.
/// </summary>
/// <typeparam name="T">The type of the response body.</typeparam>
public class HttpResponse<T>
{
    /// <summary>HTTP status code returned by the server.</summary>
    public int StatusCode { get; set; }

    /// <summary>Headers returned by the server.</summary>
    public Dictionary<string, string> Headers { get; set; }

    /// <summary>Body of the HTTP response, when one was produced.</summary>
    public T? Body { get; set; }

    /// <summary>Error message associated with the response, if any.</summary>
    public string? ErrorMessage { get; set; }

    /// <summary>Full type name of the error, when an exception occurred.</summary>
    public string? ErrorType { get; set; }

    /// <summary>Stack trace of the error, when an exception occurred.</summary>
    public string? ErrorStackTrace { get; set; }

    /// <summary>Raw <see cref="HttpResponseMessage"/> for advanced scenarios, when requested.</summary>
    public HttpResponseMessage? RawResponse { get; set; }

    /// <summary>
    /// Creates a response with an empty header collection.
    /// </summary>
    public HttpResponse() => Headers = new Dictionary<string, string>();
}
|
||||
|
||||
/// <summary>
/// Builds a fresh <see cref="System.Text.Json.JsonSerializerOptions"/> carrying the
/// HTTPComms converter set and defaults.
/// </summary>
/// <returns>Configured <see cref="System.Text.Json.JsonSerializerOptions"/> with case-insensitive enums and flexible number handling.</returns>
public static System.Text.Json.JsonSerializerOptions GetConfiguredOptions() =>
    new System.Text.Json.JsonSerializerOptions
    {
        PropertyNameCaseInsensitive = true,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull,
        IncludeFields = false,
        PreferredObjectCreationHandling = System.Text.Json.Serialization.JsonObjectCreationHandling.Populate,
        // Reflection-based resolver; prefers the most-derived declared properties.
        TypeInfoResolver = new System.Text.Json.Serialization.Metadata.DefaultJsonTypeInfoResolver(),
        Converters = { new CaseInsensitiveEnumConverter(), new FlexibleNumberConverter() }
    };
|
||||
|
||||
/// <summary>
/// Sends an HTTP request asynchronously. Automatically enforces per-host rate limits, handles content types, supports cancellation and chunked downloads.
/// </summary>
/// <typeparam name="T">Expected response type. If <c>T</c> is <c>byte[]</c> or <c>string</c>, special handling is applied.</typeparam>
/// <param name="method">HTTP method to use.</param>
/// <param name="url">Request URL.</param>
/// <param name="headers">Optional request headers.</param>
/// <param name="body">Optional JSON-serializable body for POST/PUT.</param>
/// <param name="timeout">Optional timeout; defaults to class default.</param>
/// <param name="retryCount">Number of retries for transient failures (e.g., 429/420).</param>
/// <param name="cancellationToken">Cancellation token to cancel the request and any ongoing I/O operations.</param>
/// <param name="returnRawResponse">If true, populates RawResponse in the returned object.</param>
/// <param name="progressHandler">Optional progress callback for downloads: (bytesRead, totalBytes). Called on start, progress, completion.</param>
/// <param name="chunkThresholdBytes">If content length exceeds this, stream in chunks; default 5 MB. Ignored unless T is byte[].</param>
/// <param name="enableResume">When true and server supports ranges, attempts to resume downloads starting at <paramref name="resumeFromBytes"/>.</param>
/// <param name="resumeFromBytes">Starting offset in bytes for resuming a download. Effective only when <paramref name="enableResume"/> is true and server supports ranges.</param>
/// <param name="contentType">Optional content type for the request body (e.g., "application/json", "application/xml"). Defaults to "application/json" if not specified.</param>
/// <param name="jsonSerializerOptions">Optional custom JsonSerializerOptions for deserialization. If null, uses the default options configured for this class.</param>
public async Task<HttpResponse<T>> SendRequestAsync<T>(HttpMethod method, Uri url, Dictionary<string, string>? headers = null, object? body = null, TimeSpan? timeout = null, int retryCount = 0, System.Threading.CancellationToken cancellationToken = default, bool returnRawResponse = false, Action<long, long?>? progressHandler = null, long chunkThresholdBytes = 5 * 1024 * 1024, bool enableResume = true, long resumeFromBytes = 0, string? contentType = null, System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
    // Use provided options or fall back to default
    var options = jsonSerializerOptions ?? _jsonOptions;
    var requestTimeout = timeout ?? _defaultTimeout;

    // Create empty response object
    var response = new HttpResponse<T>();

    int attempts = 0;
    int maxAttempts = retryCount > 0 ? retryCount : _defaultRetryCount;

    // Main retry loop
    while (attempts < maxAttempts)
    {
        // Build a per-attempt timeout using a linked cancellation token.
        // Retries must get a fresh timeout window rather than reusing a canceled token from a prior attempt.
        using var timeoutCts = new System.Threading.CancellationTokenSource(requestTimeout);
        using var linkedCts = System.Threading.CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token);
        var effectiveToken = linkedCts.Token;

        // --- Rate limiting logic scoped to host ---
        // Extract host from URL
        var uri = url;
        string host = uri.Host;
        bool shouldWait = false;
        int waitMs = 0;
        lock (_rateLimitLock)
        {
            DateTime now = DateTime.UtcNow;
            // Get or create the queue for this host
            if (!_hostRequestTimestamps.ContainsKey(host))
            {
                _hostRequestTimestamps[host] = new Queue<DateTime>();
            }
            var hostQueue = _hostRequestTimestamps[host];
            // Rate limit window and max requests - use per-host override if available
            int window = _rateLimitWindow;
            int maxReq = _maxRequestsPerWindow;
            if (_perHostRateLimits.TryGetValue(host, out var opts))
            {
                window = opts.WindowSeconds;
                maxReq = opts.MaxRequests;
            }
            // Remove timestamps outside the window
            while (hostQueue.Count > 0 && (now - hostQueue.Peek()).TotalSeconds > window)
            {
                hostQueue.Dequeue();
            }
            // If we've hit the max requests for the window, calculate how long to wait
            if (hostQueue.Count >= maxReq)
            {
                shouldWait = true;
                // Wait until the oldest request is outside the window
                var oldest = hostQueue.Peek();
                waitMs = (int)Math.Max(0, (window - (now - oldest).TotalSeconds) * 1000);
            }
        }
        // If rate limit exceeded for this host, wait before sending the request
        if (shouldWait && waitMs > 0)
        {
            await Task.Delay(waitMs, cancellationToken);
        }

        try
        {
            // NOTE(review): httpResponseMessage is never disposed (and may be handed to
            // the caller via RawResponse) — confirm connection lifetime is acceptable.
            HttpResponseMessage httpResponseMessage;

            // Build the HttpRequestMessage
            using (var requestMessage = new HttpRequestMessage(new System.Net.Http.HttpMethod(method.ToString()), url))
            {
                // Add body for POST/PUT requests
                if (body != null && (method == HttpMethod.POST || method == HttpMethod.PUT))
                {
                    string bodyContent;
                    if (body is string bodyString)
                    {
                        // NOTE(review): HTML-encoding a raw string body alters the payload
                        // (e.g. '<' becomes '&lt;') — confirm receivers expect encoded text.
                        bodyContent = System.Net.WebUtility.HtmlEncode(bodyString);
                    }
                    else
                    {
                        bodyContent = System.Text.Json.JsonSerializer.Serialize(body, options);
                    }

                    string effectiveContentType = contentType ?? "application/json";
                    requestMessage.Content = new StringContent(bodyContent, System.Text.Encoding.UTF8, effectiveContentType);
                }

                // Set default and request-specific headers on the per-request message.
                // Avoid mutating HttpClient.DefaultRequestHeaders, which is unsafe under concurrent requests.
                requestMessage.Headers.TryAddWithoutValidation("User-Agent", _userAgent);
                if (headers != null)
                {
                    foreach (var header in headers)
                    {
                        // Content-specific headers (e.g. Content-Type) are rejected on the
                        // request headers; retry them on the content headers instead.
                        if (!requestMessage.Headers.TryAddWithoutValidation(header.Key, header.Value) && requestMessage.Content != null)
                        {
                            requestMessage.Content.Headers.TryAddWithoutValidation(header.Key, header.Value);
                        }
                    }
                }

                // If resuming a large binary download, add Range header
                if (typeof(T) == typeof(byte[]) && enableResume && resumeFromBytes > 0 && (method == HttpMethod.GET))
                {
                    requestMessage.Headers.Range = new System.Net.Http.Headers.RangeHeaderValue(resumeFromBytes, null);
                }

                // Send the request (respect cancellation)
                httpResponseMessage = await _httpClient.SendAsync(requestMessage, System.Net.Http.HttpCompletionOption.ResponseHeadersRead, effectiveToken);
            }

            // Record this request timestamp for rate limiting (per host)
            lock (_rateLimitLock)
            {
                if (!_hostRequestTimestamps.ContainsKey(host))
                {
                    _hostRequestTimestamps[host] = new Queue<DateTime>();
                }
                _hostRequestTimestamps[host].Enqueue(DateTime.UtcNow);
            }

            // Populate response object with status code and headers
            response.StatusCode = (int)httpResponseMessage.StatusCode;
            if (returnRawResponse)
            {
                response.RawResponse = httpResponseMessage;
            }
            foreach (var header in httpResponseMessage.Headers)
            {
                response.Headers[header.Key] = string.Join(", ", header.Value);
            }
            // Also include content headers
            foreach (var header in httpResponseMessage.Content.Headers)
            {
                response.Headers[header.Key] = string.Join(", ", header.Value);
            }

            // Decide how to read based on content-type and T
            var mediaType = httpResponseMessage.Content.Headers.ContentType?.MediaType?.ToLowerInvariant();
            long? contentLength = httpResponseMessage.Content.Headers.ContentLength;
            bool isBinary = mediaType != null && (mediaType.Contains("octet-stream") || mediaType.StartsWith("image/") || mediaType.StartsWith("video/") || mediaType.StartsWith("audio/"));
            bool isCloudFlareError = false;

            // Cheap heuristic: does the payload start like a JSON value?
            static bool IsLikelyJson(string value)
            {
                var trimmed = value.TrimStart();
                if (trimmed.Length == 0)
                {
                    return false;
                }

                char first = trimmed[0];
                return first == '{' || first == '[' || first == '"' || first == 't' || first == 'f' || first == 'n' || first == '-' || char.IsDigit(first);
            }

            // CloudFlare rate-limit pages embed "error code: 1015" in the body text.
            static bool ContainsCloudFlare1015(string value)
            {
                return value.IndexOf("error code: 1015", StringComparison.OrdinalIgnoreCase) >= 0;
            }

            if (typeof(T) == typeof(byte[]))
            {
                // Determine if server supports resume via Accept-Ranges
                bool serverSupportsRanges = response.Headers.TryGetValue("Accept-Ranges", out var acceptRangesVal) && acceptRangesVal.Contains("bytes");
                long startingOffset = (enableResume && resumeFromBytes > 0 && serverSupportsRanges) ? resumeFromBytes : 0;

                // Chunked download for large payloads, with progress
                if ((contentLength.HasValue && contentLength.Value >= chunkThresholdBytes) || startingOffset > 0)
                {
                    using var stream = await httpResponseMessage.Content.ReadAsStreamAsync(effectiveToken);
                    using var ms = new MemoryStream();
                    if (startingOffset > 0)
                    {
                        // Pre-size stream by writing zeros or set position; here we just track totalRead starting at startingOffset
                        ms.Position = 0; // we will append bytes; caller can decide how to persist resumed data externally
                    }
                    var buffer = new byte[81920];
                    long totalRead = 0;
                    // If resuming, initial progress starts at startingOffset
                    progressHandler?.Invoke(startingOffset, contentLength.HasValue ? contentLength + startingOffset : null);
                    int read;
                    while ((read = await stream.ReadAsync(buffer, 0, buffer.Length, effectiveToken)) > 0)
                    {
                        ms.Write(buffer, 0, read);
                        totalRead += read;
                        // Report progress including starting offset
                        var reportedTotal = startingOffset + totalRead;
                        var reportedLength = contentLength.HasValue ? contentLength + startingOffset : null;
                        progressHandler?.Invoke(reportedTotal, reportedLength);
                    }
                    progressHandler?.Invoke(startingOffset + totalRead, contentLength.HasValue ? contentLength + startingOffset : null);
                    response.Body = (T)(object)ms.ToArray();
                }
                else
                {
                    var bytes = await httpResponseMessage.Content.ReadAsByteArrayAsync(effectiveToken);
                    response.Body = (T)(object)bytes;
                }

                // Binary bodies can still be CloudFlare error pages; sniff the text.
                if (response.Body != null)
                {
                    try
                    {
                        string bodyText = System.Text.Encoding.UTF8.GetString((byte[])(object)response.Body!).ToLowerInvariant();
                        if (bodyText.Contains("error code: 1015"))
                        {
                            isCloudFlareError = true;
                        }
                    }
                    catch
                    {
                        // Ignore decode issues and continue standard handling.
                    }
                }
            }
            else
            {
                string responseBody = await httpResponseMessage.Content.ReadAsStringAsync(effectiveToken);

                // CloudFlare can return HTML/text with 200 and "error code: 1015"; detect this before any JSON parsing.
                if (!string.IsNullOrWhiteSpace(responseBody) && ContainsCloudFlare1015(responseBody))
                {
                    isCloudFlareError = true;
                }

                if (!isCloudFlareError)
                {
                    if (typeof(T) == typeof(string))
                    {
                        response.Body = (T)(object)responseBody;
                    }
                    else if (mediaType != null && mediaType.Contains("xml"))
                    {
                        if (!string.IsNullOrWhiteSpace(responseBody))
                        {
                            var serializer = new System.Xml.Serialization.XmlSerializer(typeof(T));
                            using var reader = new StringReader(responseBody);
                            response.Body = (T?)serializer.Deserialize(reader);
                        }
                    }
                    else if (mediaType != null && (mediaType.Contains("json") || mediaType.StartsWith("text")))
                    {
                        if (!string.IsNullOrWhiteSpace(responseBody) && IsLikelyJson(responseBody))
                        {
                            response.Body = System.Text.Json.JsonSerializer.Deserialize<T>(responseBody, options);
                        }
                    }
                    else if (isBinary)
                    {
                        // Not expected for non-byte[] callers; leave body unset.
                    }
                    else
                    {
                        // Fallback: only attempt JSON parse if payload looks like JSON.
                        if (!string.IsNullOrWhiteSpace(responseBody) && IsLikelyJson(responseBody))
                        {
                            response.Body = System.Text.Json.JsonSerializer.Deserialize<T>(responseBody, options);
                        }
                    }
                }
            }

            // CloudFlare rate-limit page: back off and retry like a 429.
            if (isCloudFlareError)
            {
                attempts++;
                if (attempts < maxAttempts)
                {
                    int waitTime = _rateLimit429WaitTimeSeconds;
                    await Task.Delay(waitTime * 1000, cancellationToken);
                    continue;
                }
            }

            // If request was successful and not a CloudFlare error, exit loop
            if (httpResponseMessage.IsSuccessStatusCode && !isCloudFlareError)
            {
                response.ErrorMessage = null;
                response.ErrorType = null;
                response.ErrorStackTrace = null;
                break;
            }

            // If CloudFlare error with retries exhausted, break
            if (isCloudFlareError && attempts >= maxAttempts)
            {
                break;
            }

            // Handle 429 Too Many Requests response
            if (response.StatusCode == 429)
            {
                attempts++;
                int waitTime = _rateLimit429WaitTimeSeconds;
                // Check for Retry-After header and use it if present
                if (response.Headers.ContainsKey("Retry-After"))
                {
                    // Retry-After may be delta-seconds or an HTTP date; handle both.
                    var retryVal = response.Headers["Retry-After"];
                    if (int.TryParse(retryVal, out int retryAfterSeconds))
                    {
                        waitTime = retryAfterSeconds;
                    }
                    else if (DateTime.TryParse(retryVal, out var retryDate))
                    {
                        var diff = (int)Math.Max(0, (retryDate.ToUniversalTime() - DateTime.UtcNow).TotalSeconds);
                        waitTime = diff;
                    }
                }
                // Wait before retrying
                await Task.Delay(waitTime * 1000, cancellationToken);
            }
            // Handle 420 Enhance Your Calm response
            else if (response.StatusCode == 420)
            {
                attempts++;
                await Task.Delay(_rateLimit420WaitTimeSeconds * 1000, cancellationToken);
            }
            else if (!isCloudFlareError && !httpResponseMessage.IsSuccessStatusCode)
            {
                // For other errors, do not retry
                break;
            }
        }
        catch (OperationCanceledException ex) when (cancellationToken.IsCancellationRequested)
        {
            // Caller-initiated cancellation: record the error, then propagate.
            response.ErrorMessage = ex.Message;
            response.ErrorType = ex.GetType().FullName;
            response.ErrorStackTrace = ex.StackTrace;
            Logging.LogKey(Logging.LogType.Warning, "HTTPComms", $"Request canceled by caller on attempt {attempts + 1} for {method} {url}.", null, null, ex);
            throw;
        }
        catch (OperationCanceledException ex) when (timeoutCts.IsCancellationRequested)
        {
            // Per-attempt timeout: record as TimeoutException and retry with linear backoff.
            string timeoutMessage = $"Request timed out after {requestTimeout.TotalSeconds:0.#} seconds.";
            Logging.LogKey(Logging.LogType.Warning, "HTTPComms", $"{timeoutMessage} Attempt {attempts + 1} for {method} {url}.", null, null, ex);
            response.ErrorMessage = timeoutMessage;
            response.ErrorType = typeof(TimeoutException).FullName;
            response.ErrorStackTrace = ex.StackTrace;
            attempts++;

            if (attempts >= maxAttempts)
            {
                break;
            }

            await Task.Delay(2000 * attempts, cancellationToken);
        }
        catch (Exception ex)
        {
            // On exception, record error and retry with exponential backoff
            Logging.LogKey(Logging.LogType.Warning, "HTTPComms", $"Exception on attempt {attempts + 1} for {method} {url}: {ex.Message}", null, null, ex);
            response.ErrorMessage = ex.Message;
            response.ErrorType = ex.GetType().FullName;
            response.ErrorStackTrace = ex.StackTrace;
            attempts++;

            if (attempts >= maxAttempts)
            {
                break;
            }

            await Task.Delay(2000 * attempts, cancellationToken);
        }
    }

    // Return the response object
    return response;
}
|
||||
|
||||
/// <summary>
/// Checks download capabilities via a HEAD request: Accept-Ranges and Content-Length.
/// </summary>
/// <param name="url">The URL to inspect.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Tuple indicating AcceptRanges (bytes) and ContentLength if known.</returns>
public async Task<(bool AcceptRanges, long? ContentLength)> CheckDownloadCapabilitiesAsync(Uri url, System.Threading.CancellationToken cancellationToken = default)
{
    using var headRequest = new HttpRequestMessage(new System.Net.Http.HttpMethod("HEAD"), url);
    // Fix: the response was previously never disposed, leaking the HttpResponseMessage
    // (and its underlying connection resources) on every capability check.
    using var headResponse = await _httpClient.SendAsync(headRequest, System.Net.Http.HttpCompletionOption.ResponseHeadersRead, cancellationToken);

    // Read both values before the response is disposed at method exit.
    long? contentLen = headResponse.Content.Headers.ContentLength;

    bool acceptRanges = false;
    if (headResponse.Headers.TryGetValues("Accept-Ranges", out var ranges))
    {
        foreach (var v in ranges)
        {
            // "bytes" indicates the server accepts Range requests for resume support.
            if (v.Contains("bytes")) { acceptRanges = true; break; }
        }
    }

    return (acceptRanges, contentLen);
}
|
||||
|
||||
/// <summary>
/// Downloads content to a file with optional resume support, progress reporting, and cancellation.
/// </summary>
/// <param name="url">The URL to download.</param>
/// <param name="destinationFilePath">The file path to write to.</param>
/// <param name="headers">Optional headers to include with the request.</param>
/// <param name="cancellationToken">Cancellation token to cancel the download.</param>
/// <param name="progressHandler">Optional progress callback: (bytesWritten, totalBytes).</param>
/// <param name="overwrite">If true, deletes any existing file before downloading.</param>
/// <returns>HTTP response containing any body bytes (also written to file) and status information.</returns>
public async Task<HttpResponse<byte[]>> DownloadToFileAsync(Uri url, string destinationFilePath, Dictionary<string, string>? headers = null, System.Threading.CancellationToken cancellationToken = default, Action<long, long?>? progressHandler = null, bool overwrite = false)
{
    if (url == null) throw new ArgumentNullException(nameof(url));
    if (string.IsNullOrWhiteSpace(destinationFilePath)) throw new ArgumentException("Destination file path is required", nameof(destinationFilePath));

    // Overwrite handling
    if (overwrite && System.IO.File.Exists(destinationFilePath))
    {
        System.IO.File.Delete(destinationFilePath);
    }

    long existingLength = 0;
    if (System.IO.File.Exists(destinationFilePath))
    {
        existingLength = new System.IO.FileInfo(destinationFilePath).Length;
    }

    // Capability pre-check: range support and (when known) total size
    var (acceptRanges, contentLen) = await CheckDownloadCapabilitiesAsync(url, cancellationToken);

    // BUG FIX: when the server does not support range requests the response always
    // starts at byte 0, so a pre-existing partial file cannot be resumed. Previously a
    // partial file was appended to anyway, corrupting the output. Treat the download as
    // a fresh one instead: progress reported from 0 and the file truncated on write.
    if (!acceptRanges)
    {
        existingLength = 0;
    }

    // Use SendRequestAsync with resume enabled; write bytes to file as they arrive
    var response = await SendRequestAsync<byte[]>(HttpMethod.GET, url, headers, null, TimeSpan.FromMinutes(5), _defaultRetryCount, cancellationToken, false, (read, total) =>
    {
        // Report cumulative progress including existingLength if resuming
        var reported = existingLength + read;
        long? totalWithExisting = total.HasValue ? existingLength + total.Value : (contentLen.HasValue ? existingLength + contentLen.Value : null);
        progressHandler?.Invoke(reported, totalWithExisting);
    },
    1 * 1024 * 1024, // stream when >1MB
    enableResume: acceptRanges,
    resumeFromBytes: existingLength);

    // Write file: append when resuming an existing partial file, otherwise create/truncate
    if (response.Body != null && response.Body.Length > 0)
    {
        var mode = existingLength > 0 ? System.IO.FileMode.Append : System.IO.FileMode.Create;
        using var fs = new System.IO.FileStream(destinationFilePath, mode, System.IO.FileAccess.Write, System.IO.FileShare.None);
        await fs.WriteAsync(response.Body, 0, response.Body.Length, cancellationToken);
    }

    return response;
}
|
||||
|
||||
/// <summary>
/// Custom JSON converter factory that handles case-insensitive enum parsing.
/// </summary>
private class CaseInsensitiveEnumConverter : System.Text.Json.Serialization.JsonConverterFactory
{
    public override bool CanConvert(Type typeToConvert) => typeToConvert.IsEnum;

    public override System.Text.Json.Serialization.JsonConverter? CreateConverter(Type typeToConvert, System.Text.Json.JsonSerializerOptions options)
    {
        var converterType = typeof(CaseInsensitiveEnumConverterInner<>).MakeGenericType(typeToConvert);
        return (System.Text.Json.Serialization.JsonConverter?)Activator.CreateInstance(converterType);
    }

    private class CaseInsensitiveEnumConverterInner<T> : System.Text.Json.Serialization.JsonConverter<T> where T : struct, Enum
    {
        // Shared case-insensitive lookup of an enum member by name.
        // Returns false (with matched = default) when the name is null or unknown.
        private static bool TryMatchName(string? name, out T matched)
        {
            if (name != null)
            {
                foreach (var candidate in Enum.GetValues<T>())
                {
                    if (string.Equals(candidate.ToString(), name, StringComparison.OrdinalIgnoreCase))
                    {
                        matched = candidate;
                        return true;
                    }
                }
            }
            matched = default;
            return false;
        }

        public override T Read(ref System.Text.Json.Utf8JsonReader reader, Type typeToConvert, System.Text.Json.JsonSerializerOptions options)
        {
            if (reader.TokenType == System.Text.Json.JsonTokenType.String)
            {
                var stringValue = reader.GetString();
                if (TryMatchName(stringValue, out var matched))
                {
                    return matched;
                }
                // Log and skip unknown enum value, return default
                Logging.LogKey(Logging.LogType.Warning, "CaseInsensitiveEnumConverter", $"Unknown enum value \"{stringValue}\" for enum \"{typeToConvert}\". Skipping and using default value.", null, null);
                return default;
            }

            if (reader.TokenType == System.Text.Json.JsonTokenType.Number)
            {
                if (reader.TryGetInt32(out int intValue))
                {
                    return (T)Enum.ToObject(typeToConvert, intValue);
                }
                // Log and skip unknown numeric value
                Logging.LogKey(Logging.LogType.Warning, "CaseInsensitiveEnumConverter", $"Unknown numeric enum value {intValue} for enum \"{typeToConvert}\". Skipping and using default value.", null, null);
                return default;
            }

            Logging.LogKey(Logging.LogType.Warning, "CaseInsensitiveEnumConverter", $"Unexpected token {reader.TokenType} when parsing enum \"{typeToConvert}\". Using default value.", null, null);
            return default;
        }

        public override void Write(System.Text.Json.Utf8JsonWriter writer, T value, System.Text.Json.JsonSerializerOptions options)
            => writer.WriteStringValue(value.ToString());

        public override T ReadAsPropertyName(ref System.Text.Json.Utf8JsonReader reader, Type typeToConvert, System.Text.Json.JsonSerializerOptions options)
        {
            var stringValue = reader.GetString();
            if (TryMatchName(stringValue, out var matched))
            {
                return matched;
            }
            // Log and skip unknown enum value, return default
            Logging.LogKey(Logging.LogType.Warning, "CaseInsensitiveEnumConverter", $"Unknown enum value \"{stringValue}\" for enum \"{typeToConvert}\" as property name. Skipping and using default value.", null, null);
            return default;
        }

        public override void WriteAsPropertyName(System.Text.Json.Utf8JsonWriter writer, T value, System.Text.Json.JsonSerializerOptions options)
            => writer.WritePropertyName(value.ToString());
    }
}
|
||||
|
||||
/// <summary>
/// JSON converter factory that accepts numbers supplied either as JSON numbers or as
/// strings, for all primitive numeric types and their nullable forms.
/// </summary>
private class FlexibleNumberConverter : System.Text.Json.Serialization.JsonConverterFactory
{
    // All numeric types (and their nullable forms) this factory can handle.
    private static readonly HashSet<Type> SupportedTypes = new HashSet<Type>
    {
        typeof(int), typeof(int?), typeof(long), typeof(long?),
        typeof(uint), typeof(uint?), typeof(ulong), typeof(ulong?),
        typeof(short), typeof(short?), typeof(ushort), typeof(ushort?),
        typeof(byte), typeof(byte?), typeof(sbyte), typeof(sbyte?),
        typeof(float), typeof(float?), typeof(double), typeof(double?),
        typeof(decimal), typeof(decimal?)
    };

    public override bool CanConvert(Type typeToConvert) => SupportedTypes.Contains(typeToConvert);

    public override System.Text.Json.Serialization.JsonConverter? CreateConverter(Type typeToConvert, System.Text.Json.JsonSerializerOptions options)
    {
        var converterType = typeof(FlexibleNumberConverterInner<>).MakeGenericType(typeToConvert);
        return (System.Text.Json.Serialization.JsonConverter?)Activator.CreateInstance(converterType);
    }

    private class FlexibleNumberConverterInner<T> : System.Text.Json.Serialization.JsonConverter<T>
    {
        /// <summary>
        /// Reads a numeric value from a number token, a string token, or null.
        /// Conversion failures are logged and yield default(T).
        /// </summary>
        public override T? Read(ref System.Text.Json.Utf8JsonReader reader, Type typeToConvert, System.Text.Json.JsonSerializerOptions options)
        {
            // Unwrap Nullable<T> so Convert.ChangeType targets the underlying numeric type.
            var targetType = Nullable.GetUnderlyingType(typeToConvert) ?? typeToConvert;
            try
            {
                switch (reader.TokenType)
                {
                    case System.Text.Json.JsonTokenType.Number:
                        return (T?)Convert.ChangeType(reader.GetDecimal(), targetType, System.Globalization.CultureInfo.InvariantCulture);
                    case System.Text.Json.JsonTokenType.String:
                        var stringValue = reader.GetString();
                        if (string.IsNullOrWhiteSpace(stringValue))
                            return default;
                        // BUG FIX: parse with the invariant culture so values such as "3.14"
                        // are read consistently regardless of the host locale's decimal separator.
                        return (T?)Convert.ChangeType(stringValue, targetType, System.Globalization.CultureInfo.InvariantCulture);
                    case System.Text.Json.JsonTokenType.Null:
                        return default;
                }
            }
            catch (Exception ex)
            {
                Logging.LogKey(Logging.LogType.Warning, "FlexibleNumberConverter", $"Failed to convert value to {typeToConvert.Name}: {ex.Message}", null, null);
            }
            return default;
        }

        public override void Write(System.Text.Json.Utf8JsonWriter writer, T? value, System.Text.Json.JsonSerializerOptions options)
        {
            if (value == null)
            {
                writer.WriteNullValue();
            }
            else
            {
                // BUG FIX: format with the invariant culture. value.ToString() uses the
                // current culture and could emit e.g. "3,14", producing invalid JSON
                // when written via WriteRawValue.
                writer.WriteRawValue(Convert.ToString(value, System.Globalization.CultureInfo.InvariantCulture) ?? "null");
            }
        }
    }
}
|
||||
}
|
||||
}
|
||||
85
gaseous-lib/Classes/HashObject.cs
Normal file
85
gaseous-lib/Classes/HashObject.cs
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
using System;
|
||||
using System.IO;
|
||||
using System.Security.Cryptography;
|
||||
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
/// <summary>
/// Represents a collection of hash values for a file, including MD5, SHA1, SHA256, and CRC32.
/// </summary>
public class HashObject
{
    /// <summary>
    /// The MD5 hash of the file, represented as a lowercase hexadecimal string.
    /// </summary>
    public string md5hash { get; set; } = string.Empty;

    /// <summary>
    /// The SHA1 hash of the file, represented as a lowercase hexadecimal string.
    /// </summary>
    public string sha1hash { get; set; } = string.Empty;

    /// <summary>
    /// The SHA256 hash of the file, represented as a lowercase hexadecimal string.
    /// </summary>
    public string sha256hash { get; set; } = string.Empty;

    /// <summary>
    /// The CRC32 hash of the file, represented as a lowercase hexadecimal string.
    /// </summary>
    public string crc32hash { get; set; } = string.Empty;

    /// <summary>
    /// Initializes a new instance of the HashObject class with empty hash values.
    /// </summary>
    public HashObject() { }

    /// <summary>
    /// Initializes a new instance of the HashObject class with specified hash values.
    /// </summary>
    /// <param name="md5">The MD5 hash value.</param>
    /// <param name="sha1">The SHA1 hash value.</param>
    /// <param name="sha256">The SHA256 hash value.</param>
    /// <param name="crc32">The CRC32 hash value.</param>
    public HashObject(string md5, string sha1, string sha256, string crc32)
    {
        md5hash = md5;
        sha1hash = sha1;
        sha256hash = sha256;
        crc32hash = crc32;
    }

    /// <summary>
    /// Initializes a new instance of the HashObject class by computing the hash values for the specified file.
    /// </summary>
    /// <param name="fileName">The path to the file for which to compute hash values.</param>
    public HashObject(string fileName)
    {
        // One stream is shared by the MD5/SHA1/SHA256 passes; the position is rewound between passes.
        using var fileStream = File.OpenRead(fileName);

        Logging.LogKey(Logging.LogType.Information, "process.hash_file", "hashfile.generating_md5", null, new string[] { fileName });
        using (var md5 = MD5.Create())
        {
            md5hash = ToLowerHex(md5.ComputeHash(fileStream));
        }

        Logging.LogKey(Logging.LogType.Information, "process.hash_file", "hashfile.generating_sha1", null, new string[] { fileName });
        fileStream.Position = 0;
        using (var sha1 = SHA1.Create())
        {
            sha1hash = ToLowerHex(sha1.ComputeHash(fileStream));
        }

        Logging.LogKey(Logging.LogType.Information, "process.hash_file", "hashfile.generating_sha256", null, new string[] { fileName });
        fileStream.Position = 0;
        using (var sha256 = SHA256.Create())
        {
            sha256hash = ToLowerHex(sha256.ComputeHash(fileStream));
        }

        Logging.LogKey(Logging.LogType.Information, "process.hash_file", "hashfile.generating_crc32", null, new string[] { fileName });
        // NOTE(review): CRC32.ComputeFile takes the path and presumably re-opens the file
        // rather than reusing fileStream — confirm against the CRC32 helper.
        uint crc32HashCalc = CRC32.ComputeFile(fileName);
        crc32hash = crc32HashCalc.ToString("x8");
    }

    // Formats a raw hash as a lowercase hexadecimal string; idiomatic replacement for
    // the BitConverter.ToString(...).Replace("-", "").ToLowerInvariant() chain.
    private static string ToLowerHex(byte[] hash) => Convert.ToHexString(hash).ToLowerInvariant();
}
|
||||
}
|
||||
1232
gaseous-lib/Classes/ImportGames.cs
Normal file
1232
gaseous-lib/Classes/ImportGames.cs
Normal file
File diff suppressed because it is too large
Load diff
639
gaseous-lib/Classes/Localisation.cs
Normal file
639
gaseous-lib/Classes/Localisation.cs
Normal file
|
|
@ -0,0 +1,639 @@
|
|||
using System.Collections.Concurrent;
|
||||
using gaseous_server.Models;
|
||||
using Microsoft.CodeAnalysis.CSharp.Syntax;
|
||||
using Microsoft.IdentityModel.Tokens;
|
||||
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
/// <summary>
|
||||
/// Provides localisation utilities for sanitising locale codes, loading locale files and merging overlay locale data.
|
||||
/// </summary>
|
||||
public static class Localisation
|
||||
{
|
||||
/// <summary>
|
||||
/// The default locale code used by the server.
|
||||
/// </summary>
|
||||
public const string DefaultLocale = "en-AU";
|
||||
|
||||
private static string _currentLocale = Config.ServerLanguage;
|
||||
|
||||
private static ConcurrentDictionary<string, LocaleFileModel> _loadedLocales = new ConcurrentDictionary<string, LocaleFileModel>();
|
||||
|
||||
/// <summary>
/// Gets or sets the current locale code used by the server.
/// </summary>
/// <exception cref="ArgumentException">Thrown when setting an invalid locale code.</exception>
/// <exception cref="FileNotFoundException">Thrown when setting a locale code for which the locale file cannot be found.</exception>
/// <remarks>
/// Setting the locale sanitises and validates the code, then verifies the locale file can
/// actually be loaded before committing the change. This value is only updated at server
/// start up and is not intended to be changed at runtime.
/// </remarks>
public static string CurrentLocale
{
    get => _currentLocale;
    set
    {
        string sanitisedLocale = SanitiseLocale(value);
        if (string.IsNullOrEmpty(sanitisedLocale))
        {
            throw new ArgumentException("Invalid locale", nameof(value));
        }

        try
        {
            // probe-load the locale file; result discarded, we only care that it loads
            _ = GetLanguageFile(sanitisedLocale);
        }
        catch (FileNotFoundException)
        {
            throw new FileNotFoundException("Locale file not found", sanitisedLocale);
        }

        _currentLocale = sanitisedLocale;
    }
}
|
||||
|
||||
/// <summary>
/// Gets a dictionary mapping available locale codes to their native or display names.
/// </summary>
public static Dictionary<string, string> AvailableLanguages
{
    get { return GetLanguages(); }
}
|
||||
|
||||
/// <summary>
/// Translates a localisation key using the current locale from either client-facing strings or server-only strings.
/// </summary>
/// <param name="key">The localisation key to look up.</param>
/// <param name="args">Optional formatting arguments applied via string.Format.</param>
/// <param name="useServerStrings">If true, lookup is performed against ServerStrings; otherwise against Strings.</param>
/// <returns>Translated (and formatted) value or the key when not found.</returns>
public static string Translate(string key, string[]? args = null, bool useServerStrings = true)
{
    // lazily load the current locale on first use
    if (!_loadedLocales.ContainsKey(_currentLocale))
    {
        GetLanguageFile(_currentLocale);
    }

    if (!_loadedLocales.TryGetValue(_currentLocale, out var localeFile) || localeFile == null)
    {
        return key; // fallback: locale could not be loaded
    }

    var dict = GetActiveDictionary(localeFile, useServerStrings);
    if (dict == null || !dict.TryGetValue(key, out var value))
    {
        return key; // fallback: key not present in the active dictionary
    }

    if (args == null || args.Length == 0)
    {
        return value;
    }

    try
    {
        return string.Format(value, args);
    }
    catch (FormatException)
    {
        // malformed format string in the locale file: return the raw value
        return value;
    }
}
|
||||
|
||||
/// <summary>
/// Translates a pluralisable localisation key selecting category form from either Strings or ServerStrings.
/// </summary>
/// <param name="baseKey">Base key without category suffix (e.g. "items" resolves to "items.one"/"items.other").</param>
/// <param name="count">Numeric count used for rule evaluation.</param>
/// <param name="args">Optional formatting arguments.</param>
/// <param name="useServerStrings">If true lookup uses ServerStrings; otherwise Strings.</param>
/// <returns>Pluralised translation or fallback key string.</returns>
/// <remarks>
/// Resolution order: (1) CLDR-style per-category rules from PluralRules, evaluated
/// zero/one/few/many/other; (2) the legacy single PluralRule expression (default "n != 1");
/// (3) a fallback chain over every category key and finally the bare baseKey.
/// </remarks>
public static string TranslatePlural(string baseKey, long count, string[]? args = null, bool useServerStrings = true)
{
    // ensure locale loaded
    if (!_loadedLocales.ContainsKey(_currentLocale))
    {
        GetLanguageFile(_currentLocale);
    }

    if (!_loadedLocales.TryGetValue(_currentLocale, out var localeFile) || localeFile == null)
    {
        return baseKey; // locale not loaded
    }

    var dict = GetActiveDictionary(localeFile, useServerStrings);
    if (dict == null)
    {
        return baseKey;
    }

    string? resolvedKey = null;

    // Advanced plural rules: evaluate categories in priority order
    if (localeFile.PluralRules != null && localeFile.PluralRules.Count > 0)
    {
        // Define evaluation order; user may supply subset.
        // First category whose rule expression is true for 'count' wins.
        string[] order = new[] { "zero", "one", "few", "many", "other" };
        var matchedCategory = order.FirstOrDefault(cat => localeFile.PluralRules.ContainsKey(cat) && EvaluatePluralRule(localeFile.PluralRules[cat], count));
        if (matchedCategory != null)
        {
            resolvedKey = baseKey + "." + matchedCategory;
        }
    }

    // Legacy binary rule fallback if no category matched or no advanced rules
    if (resolvedKey == null)
    {
        string rule = localeFile.PluralRule ?? "n != 1"; // default behaviour
        bool isPlural = EvaluatePluralRule(rule, count);
        resolvedKey = isPlural ? baseKey + ".other" : baseKey + ".one";
    }

    // Attempt retrieval; build fallback chain of alternative categories
    // (the preferred resolvedKey is tried first, then every other category form).
    List<string> fallbackKeys = new List<string>();
    fallbackKeys.Add(resolvedKey);

    // Add other plural forms for fallback (excluding already added)
    string[] allCats = { "zero", "one", "few", "many", "other" };
    foreach (var cat in allCats)
    {
        string k = baseKey + "." + cat;
        if (!fallbackKeys.Contains(k)) fallbackKeys.Add(k);
    }
    // baseKey itself
    fallbackKeys.Add(baseKey);

    string? value = null;
    foreach (var k in fallbackKeys)
    {
        // note: resolvedKey is updated to whichever key actually supplied the value
        if (dict.TryGetValue(k, out value)) { resolvedKey = k; break; }
    }

    if (value == null)
    {
        return resolvedKey ?? baseKey; // final fallback: category key name or baseKey
    }

    if (args != null && args.Length > 0)
    {
        // malformed format strings in locale files fall back to the unformatted value
        try { return string.Format(value, args); } catch (FormatException) { return value; }
    }
    return value;
}
|
||||
|
||||
/// <summary>
/// Selects the lookup dictionary: ServerStrings for server-side messages, Strings for client-facing text.
/// </summary>
private static Dictionary<string, string>? GetActiveDictionary(LocaleFileModel localeFile, bool useServerStrings)
    => useServerStrings ? localeFile.ServerStrings : localeFile.Strings;
|
||||
|
||||
/// <summary>
/// Evaluates a simple plural rule expression against the given count.
/// </summary>
/// <param name="expression">Boolean expression referencing 'n'. Example: "n != 1".</param>
/// <param name="n">The numeric count.</param>
/// <returns>True if expression evaluates to true; otherwise false. Invalid expressions default to (n != 1).</returns>
private static bool EvaluatePluralRule(string expression, long n)
{
    // Small, safe evaluator supporting: n, integers, parentheses, ==, !=, <, <=, >, >=, &&, ||
    try
    {
        var tokens = TokenizeExpression(expression);
        int index = 0;
        return ParseOr(tokens, ref index, n);
    }
    catch
    {
        // any tokenizer/parser failure falls back to the default English-style rule
        return n != 1;
    }
}
|
||||
|
||||
// Splits a plural-rule expression into tokens: maximal digit runs, maximal letter runs,
// the two-character operators (==, !=, <=, >=, &&, ||), and any other single character.
// Whitespace is discarded.
private static List<string> TokenizeExpression(string expr)
{
    var tokens = new List<string>();
    int pos = 0;
    while (pos < expr.Length)
    {
        char current = expr[pos];

        if (char.IsWhiteSpace(current))
        {
            pos++;
            continue;
        }

        if (char.IsDigit(current) || char.IsLetter(current))
        {
            // consume a maximal run of the same character class (digits or letters)
            Func<char, bool> sameClass = char.IsDigit(current) ? char.IsDigit : char.IsLetter;
            int runStart = pos;
            while (pos < expr.Length && sameClass(expr[pos]))
            {
                pos++;
            }
            tokens.Add(expr.Substring(runStart, pos - runStart));
            continue;
        }

        // two-character operators take precedence over single characters
        if (pos + 1 < expr.Length)
        {
            string pair = expr.Substring(pos, 2);
            if (pair == "==" || pair == "!=" || pair == "<=" || pair == ">=" || pair == "&&" || pair == "||")
            {
                tokens.Add(pair);
                pos += 2;
                continue;
            }
        }

        tokens.Add(current.ToString());
        pos++;
    }
    return tokens;
}
|
||||
|
||||
// Recursive descent parser: or -> and -> comparison -> primary
// Parses a chain of '||' terms; OR has the lowest precedence.
private static bool ParseOr(List<string> tokens, ref int index, long n)
{
    bool result = ParseAnd(tokens, ref index, n);
    while (index < tokens.Count && tokens[index] == "||")
    {
        index++; // consume '||'
        // the right-hand side is always parsed (no short-circuit) so the
        // token index advances past it regardless of the current result
        bool rhs = ParseAnd(tokens, ref index, n);
        result |= rhs;
    }
    return result;
}
|
||||
|
||||
// Parses a chain of '&&' terms; binds tighter than '||'.
private static bool ParseAnd(List<string> tokens, ref int index, long n)
{
    bool result = ParseComparison(tokens, ref index, n);
    while (index < tokens.Count && tokens[index] == "&&")
    {
        index++; // consume '&&'
        // always parse the right-hand side so the index stays consistent
        bool rhs = ParseComparison(tokens, ref index, n);
        result &= rhs;
    }
    return result;
}
|
||||
|
||||
// Parses "value [op value]". A bare value with no comparison operator is treated
// as a plural test: any value other than 1 counts as true (plural).
private static bool ParseComparison(List<string> tokens, ref int index, long n)
{
    long lhs = ParseValue(tokens, ref index, n);

    if (index >= tokens.Count)
    {
        return lhs != 1; // if no operator, treat number as boolean (non 1 => plural)
    }

    string op = tokens[index];
    bool isComparisonOp = op == "==" || op == "!=" || op == "<" || op == "<=" || op == ">" || op == ">=";
    if (!isComparisonOp)
    {
        // not a comparison operator; leave the token for the caller and treat lhs as boolean
        return lhs != 1;
    }

    index++; // consume operator
    long rhs = ParseValue(tokens, ref index, n);

    switch (op)
    {
        case "==": return lhs == rhs;
        case "!=": return lhs != rhs;
        case "<": return lhs < rhs;
        case "<=": return lhs <= rhs;
        case ">": return lhs > rhs;
        case ">=": return lhs >= rhs;
        default: return false;
    }
}
|
||||
|
||||
// Parses a primary value: a parenthesised sub-expression (1 for true, 0 for false),
// the variable 'n', an integer literal, or 0 for anything unrecognised.
private static long ParseValue(List<string> tokens, ref int index, long n)
{
    if (index >= tokens.Count)
    {
        return 0;
    }

    string token = tokens[index];
    index++; // every branch below consumes exactly this one token first

    if (token == "(")
    {
        bool inner = ParseOr(tokens, ref index, n);
        if (index < tokens.Count && tokens[index] == ")")
        {
            index++; // consume ')'
        }
        return inner ? 1 : 0; // boolean sub-expression coerced to 1/0
    }

    if (token.Equals("n", StringComparison.OrdinalIgnoreCase))
    {
        return n;
    }

    // integer literal, or 0 for an unknown token
    return long.TryParse(token, out var literal) ? literal : 0;
}
|
||||
|
||||
/// <summary>
/// Sanitises a locale string by removing invalid characters and enforcing a standard format.
/// </summary>
/// <param name="locale">The input locale string to sanitise. The locale can be either the language (example: en), or the language and region (example: en-AU)</param>
/// <returns>The sanitised locale string.</returns>
/// <remarks>
/// Characters other than letters, digits, hyphens and underscores are stripped.
/// The result is normalised to "language" (lowercase) or "language-REGION"
/// (lowercase language, uppercase region); any further segments are discarded.
/// </remarks>
public static string SanitiseLocale(string locale)
{
    // strip anything that is not a letter, digit, hyphen or underscore
    var cleaned = new string(locale.Where(c => char.IsLetterOrDigit(c) || c == '-' || c == '_').ToArray());

    var segments = cleaned.Split(new char[] { '-', '_' }, StringSplitOptions.RemoveEmptyEntries);
    switch (segments.Length)
    {
        case 0:
            return DefaultLocale; // nothing usable left: fall back to the server default
        case 1:
            return segments[0].ToLowerInvariant();
        default:
            return $"{segments[0].ToLowerInvariant()}-{segments[1].ToUpperInvariant()}";
    }
}
|
||||
|
||||
/// <summary>
/// Loads and returns the locale file data for the specified locale code, merging overlay locales with their base locale if applicable.
/// </summary>
/// <param name="locale">Locale code to load (e.g., en or en-AU).</param>
/// <returns>A LocaleFileModel representing the loaded localisation data.</returns>
/// <exception cref="ArgumentException">Thrown when the provided locale is invalid.</exception>
/// <exception cref="FileNotFoundException">Thrown when the locale file (or required base overlay file) cannot be found.</exception>
/// <remarks>
/// Load order: cached copy, then English (resource + optional on-disk override) as the
/// fallback, then the requested locale from embedded resources and from disk. Disk data
/// takes precedence over resource data, and the result is merged over English so every
/// key has at least an English value. The result is cached in _loadedLocales.
/// </remarks>
public static LocaleFileModel GetLanguageFile(string locale)
{
    // NOTE(review): baseLanguagePath is the language-only file (e.g. "fr.json") and is
    // passed as the output path when materialising the embedded "en" resource below —
    // for a non-English locale this looks like it could write English content to the
    // base-language file if it does not exist yet. Confirm this is intentional.
    string baseLanguagePath = Path.Combine(Config.LocalisationPath, locale.Split('-')[0] + ".json");
    string localeLanguagePath = Path.Combine(Config.LocalisationPath, locale + ".json");

    // check if locale is already loaded
    if (_loadedLocales.ContainsKey(locale))
    {
        return _loadedLocales[locale];
    }

    // load English as fallback
    // the strings from English should be used if any strings are missing from the requested locale, or the requested locale file cannot be found
    LocaleFileModel englishLocale;
    if (!_loadedLocales.ContainsKey("en"))
    {
        LocaleFileModel enLocale = LoadLocaleFromResources("en", baseLanguagePath);
        LocaleFileModel? enFileLocale = null;
        try
        {
            enFileLocale = LoadLocaleFromFile("en");
        }
        catch (FileNotFoundException)
        {
            // silent catch
        }
        // on-disk English overrides take precedence over the embedded resource
        if (enLocale != null && enFileLocale != null)
        {
            englishLocale = MergeLocaleFiles(enLocale, enFileLocale);
        }
        else if (enFileLocale != null)
        {
            englishLocale = enFileLocale;
        }
        else
        {
            englishLocale = enLocale!; // enLocale must not be null here otherwise resource load failed earlier
        }
        if (englishLocale != null)
        {
            _loadedLocales.TryAdd("en", englishLocale);
        }
    }
    else
    {
        englishLocale = _loadedLocales["en"];
    }

    // load the locale file from the embedded resources
    LocaleFileModel? resourceLocale = null;
    try
    {
        resourceLocale = LoadLocaleFromResources(locale, localeLanguagePath);
    }
    catch (FileNotFoundException)
    {
        // silent catch
    }

    // load the locale file from disk
    LocaleFileModel? fileLocale = null;
    try
    {
        fileLocale = LoadLocaleFromFile(locale);
    }
    catch (FileNotFoundException)
    {
        // silent catch
    }

    LocaleFileModel? localeData = null;
    if (resourceLocale != null && fileLocale != null)
    {
        // merge the two, with fileLocale taking precedence
        localeData = MergeLocaleFiles(resourceLocale, fileLocale);
    }
    else if (fileLocale != null)
    {
        localeData = fileLocale;
    }
    else if (resourceLocale != null)
    {
        localeData = resourceLocale;
    }
    else
    {
        // neither source produced data: the locale genuinely does not exist
        throw new FileNotFoundException("Locale file not found", locale);
    }

    // merge the localeData with English to ensure all strings are present
    if (localeData.Code != "en")
    {
        if (englishLocale != null)
        {
            localeData = MergeLocaleFiles(englishLocale, localeData);
        }
        // if englishLocale unexpectedly null, skip merge (localeData already loaded)
    }

    // check if locale is defined in loaded locales cache, and add it if not
    _loadedLocales.AddOrUpdate(locale, localeData, (key, oldValue) => localeData);

    return localeData;
}
|
||||
|
||||
/// <summary>
/// Retrieves a dictionary of available languages/locales supported by the server.
/// </summary>
/// <returns>A dictionary where the key is the locale code and the value is the native name of the language.</returns>
public static Dictionary<string, string> GetLanguages()
{
    var languages = new Dictionary<string, string>();

    // attempts to load a locale and record its display name; invalid files are skipped
    void TryRegister(string localeCode)
    {
        try
        {
            LocaleFileModel localeFile = GetLanguageFile(localeCode);
            if (localeFile != null && !languages.ContainsKey(localeFile.Code))
            {
                languages.Add(localeFile.Code, localeFile.NativeName ?? localeFile.Name ?? localeFile.Code);
            }
        }
        catch (Exception)
        {
            // skip invalid locale files
        }
    }

    // locales embedded as assembly resources
    const string resourcePrefix = "gaseous_lib.Support.Localisation.";
    var assembly = System.Reflection.Assembly.GetExecutingAssembly();
    foreach (var resourceName in assembly.GetManifestResourceNames())
    {
        if (resourceName.StartsWith(resourcePrefix) && resourceName.EndsWith(".json"))
        {
            string localeCode = resourceName.Substring(resourcePrefix.Length);
            localeCode = localeCode.Substring(0, localeCode.Length - ".json".Length);
            TryRegister(localeCode);
        }
    }

    // locales provided as files on disk
    if (Directory.Exists(Config.LocalisationPath))
    {
        foreach (var filePath in Directory.GetFiles(Config.LocalisationPath, "*.json"))
        {
            TryRegister(Path.GetFileNameWithoutExtension(filePath));
        }
    }

    // sort languages by native name
    return languages.OrderBy(kvp => kvp.Value).ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
}
|
||||
|
||||
/// <summary>
/// Loads a locale from the assembly's embedded resources, writing a copy to disk (when
/// absent) so users can create overlay locales from it. Overlay locales are merged over
/// their parent language, loaded recursively from resources.
/// </summary>
/// <param name="locale">Locale code whose embedded JSON resource should be loaded.</param>
/// <param name="outputPath">On-disk path the resource JSON is written to when no file exists there yet.</param>
/// <returns>The deserialised (and, for overlays, merged) locale data.</returns>
/// <exception cref="FileNotFoundException">Thrown when no embedded resource exists for the locale.</exception>
/// <exception cref="InvalidDataException">Thrown when the resource JSON cannot be deserialised.</exception>
private static LocaleFileModel LoadLocaleFromResources(string locale, string outputPath)
{
    string resourceName = "gaseous_lib.Support.Localisation." + locale + ".json";
    using (var stream = System.Reflection.Assembly.GetExecutingAssembly().GetManifestResourceStream(resourceName))
    {
        if (stream == null)
        {
            throw new FileNotFoundException("Locale file not found in resources", locale);
        }

        using (var reader = new StreamReader(stream))
        {
            string json = reader.ReadToEnd();
            LocaleFileModel? localeFile = Newtonsoft.Json.JsonConvert.DeserializeObject<LocaleFileModel>(json);
            if (localeFile == null)
            {
                throw new InvalidDataException("Failed to deserialize locale resource JSON: " + resourceName);
            }

            // write the resource locale to disk if it doesn't already exist, to allow users to easily modify and create overlay locales based on it
            if (!System.IO.File.Exists(outputPath))
            {
                File.WriteAllText(outputPath, json);
            }

            if (localeFile.Type == LocaleFileModel.LocaleFileType.Overlay)
            {
                // load the base locale from resources
                // NOTE(review): the recursive call reuses this overlay's outputPath; by this
                // point the file exists (written above or pre-existing), so the base locale
                // is not written over it — confirm this ordering is relied upon.
                LocaleFileModel baseLocale = LoadLocaleFromResources(localeFile.ParentLanguage, outputPath);
                localeFile = MergeLocaleFiles(baseLocale, localeFile);
            }

            return localeFile;
        }
    }
}
|
||||
|
||||
/// <summary>
/// Loads a locale file from disk (Config.LocalisationPath), resolving overlay
/// locales against their on-disk parent language file when available.
/// </summary>
/// <param name="locale">Locale code; the file read is "&lt;locale&gt;.json".</param>
/// <returns>The resolved locale model, or null when the file does not exist.</returns>
/// <exception cref="InvalidDataException">The file exists but cannot be deserialized.</exception>
private static LocaleFileModel? LoadLocaleFromFile(string locale)
{
    string filePath = Path.Combine(Config.LocalisationPath, locale + ".json");
    if (!System.IO.File.Exists(filePath))
    {
        Logging.LogKey(Logging.LogType.Information, "Localisation", $"Locale file not found on disk for locale '{locale}': {filePath}");
        return null;
    }

    string json = System.IO.File.ReadAllText(filePath);
    LocaleFileModel? localeFile = Newtonsoft.Json.JsonConvert.DeserializeObject<LocaleFileModel>(json);
    if (localeFile == null)
    {
        throw new InvalidDataException("Failed to deserialize locale file JSON: " + filePath);
    }

    if (localeFile.Type == LocaleFileModel.LocaleFileType.Overlay)
    {
        // BUG FIX: the recursive load returns null when the parent file is
        // missing; previously that null flowed straight into MergeLocaleFiles
        // and caused a NullReferenceException. Also guard against a null/empty
        // or self-referencing parent, which caused unbounded recursion.
        // When the base cannot be loaded, fall back to the overlay alone.
        if (!string.IsNullOrWhiteSpace(localeFile.ParentLanguage) &&
            !string.Equals(localeFile.ParentLanguage, locale, StringComparison.OrdinalIgnoreCase))
        {
            // load the base locale from file
            LocaleFileModel? baseLocale = LoadLocaleFromFile(localeFile.ParentLanguage);
            if (baseLocale != null)
            {
                localeFile = MergeLocaleFiles(baseLocale, localeFile);
            }
        }
    }

    return localeFile;
}
|
||||
|
||||
/// <summary>
/// Merges an overlay locale onto its base locale: overlay values win wherever
/// present, the base supplies everything the overlay omits.
/// </summary>
/// <param name="baseLocale">The parent/base locale providing fallback values.</param>
/// <param name="overlayLocale">The overlay whose values take precedence.</param>
/// <returns>A new, independent merged <see cref="LocaleFileModel"/>.</returns>
private static LocaleFileModel MergeLocaleFiles(LocaleFileModel baseLocale, LocaleFileModel overlayLocale)
{
    LocaleFileModel mergedLocale = new LocaleFileModel
    {
        Name = overlayLocale.Name ?? baseLocale.Name,
        NativeName = overlayLocale.NativeName ?? baseLocale.NativeName,
        Code = overlayLocale.Code ?? baseLocale.Code,
        PluralRule = overlayLocale.PluralRule ?? baseLocale.PluralRule,
        Direction = overlayLocale.Direction ?? baseLocale.Direction,
        Strings = new Dictionary<string, string>(),
        ServerStrings = new Dictionary<string, string>()
    };

    // CONSISTENCY FIX: the original re-initialised Strings with `??=` (redundant —
    // it was assigned just above) but not ServerStrings, and repeated the same copy
    // loop four times. One helper covers all four copies; copy order guarantees
    // overlay entries overwrite base entries.
    var mergedStrings = mergedLocale.Strings ??= new Dictionary<string, string>();
    var mergedServerStrings = mergedLocale.ServerStrings ??= new Dictionary<string, string>();

    // Copies entries from source into target; later calls overwrite earlier keys.
    static void CopyInto(Dictionary<string, string> target, Dictionary<string, string>? source)
    {
        if (source == null)
        {
            return;
        }
        foreach (var kvp in source)
        {
            target[kvp.Key] = kvp.Value;
        }
    }

    CopyInto(mergedStrings, baseLocale.Strings);          // base strings first
    CopyInto(mergedServerStrings, baseLocale.ServerStrings);
    CopyInto(mergedStrings, overlayLocale.Strings);       // overlay wins
    CopyInto(mergedServerStrings, overlayLocale.ServerStrings);

    return mergedLocale;
}
|
||||
}
|
||||
}
|
||||
504
gaseous-lib/Classes/Logging.cs
Normal file
504
gaseous-lib/Classes/Logging.cs
Normal file
|
|
@ -0,0 +1,504 @@
|
|||
// ----------------------------------------------------------------------------
|
||||
// File: Logging.cs
|
||||
// Project: gaseous-server
|
||||
// Description:
|
||||
// Centralized logging utilities for the Gaseous server. Supports writing
|
||||
// to console (with ANSI colour when not running as a Windows Service),
|
||||
// Windows Event Log (when running as a service on Windows), relational
|
||||
// database persistence (MySQL), and disk-based fallback / retention.
|
||||
//
|
||||
// Key features:
|
||||
// * Asynchronous log write to prevent blocking caller.
|
||||
// * Conditional debug logging based on configuration.
|
||||
// * Automatic correlation / context capture (CorrelationId, CallingUser,
|
||||
// CallingProcess) via CallContext when available.
|
||||
// * Disk retention sweep that purges aged log files based on configured
|
||||
// retention period.
|
||||
// * Graceful degradation: failures to write to DB or Event Log fall back
|
||||
// to disk storage.
|
||||
//
|
||||
// NOTE: This class mixes infrastructure concerns (DB, filesystem, EventLog).
|
||||
// Future refactoring could separate providers (e.g., ILogSink) to
|
||||
// improve testability and single responsibility.
|
||||
// ----------------------------------------------------------------------------
|
||||
using System;
|
||||
using System.Data;
|
||||
using System.Diagnostics;
|
||||
using System.Reflection;
|
||||
using System.Reflection.Metadata.Ecma335;
|
||||
using gaseous_server.Classes.Plugins;
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
/// <summary>
|
||||
/// Provides logging utilities for the server including methods to emit log entries
|
||||
/// to console, Windows Event Log, a database table, and disk. Supports contextual
|
||||
/// enrichment and log retrieval with filtering.
|
||||
/// </summary>
|
||||
public class Logging
|
||||
{
|
||||
/// <summary>
|
||||
/// When set to true, all logging operations are forced to disk (bypassing DB and Event Log).
|
||||
/// </summary>
|
||||
public static bool WriteToDiskOnly { get; set; } = false;
|
||||
|
||||
/// <summary>
/// Adds a log entry using localisation keys for process and message, translating internally.
/// Prefer this overload for new code instead of performing Localisation.Translate at call sites.
/// </summary>
/// <param name="eventType">Severity / classification of the log entry.</param>
/// <param name="processKey">Localisation key for the logical process/component (server strings).</param>
/// <param name="messageKey">Localisation key for the log message (server strings).</param>
/// <param name="processArgs">Optional formatting arguments for process translation.</param>
/// <param name="messageArgs">Optional formatting arguments for message translation.</param>
/// <param name="exceptionValue">Optional exception to record.</param>
/// <param name="logToDiskOnly">If true, bypass non-disk sinks.</param>
/// <param name="additionalData">Optional structured metadata.</param>
public static void LogKey(LogType eventType, string processKey, string messageKey, string[]? processArgs = null, string[]? messageArgs = null, Exception? exceptionValue = null, bool logToDiskOnly = false, Dictionary<string, object>? additionalData = null)
{
    // Translate both keys up front; SafeTranslate falls back to the raw key on failure.
    var entry = new LogItem
    {
        EventTime = DateTime.UtcNow,
        EventType = eventType,
        Process = SafeTranslate(processKey, processArgs),
        Message = SafeTranslate(messageKey, messageArgs),
        AdditionalData = additionalData ?? new Dictionary<string, object>(),
        ExceptionValue = Common.ReturnValueIfNull(exceptionValue, "").ToString()
    };

    // Fire-and-forget so the caller is never blocked by sink latency.
#pragma warning disable CS0618
    _ = Task.Run(() => WriteLogAsync(entry, logToDiskOnly));
#pragma warning restore CS0618
}
|
||||
|
||||
/// <summary>
/// Safely translates a localisation key (server strings) with optional formatting arguments.
/// Returns the key itself if translation fails or the key is not found; empty keys yield "".
/// </summary>
private static string SafeTranslate(string key, string[]? args)
{
    // Guard clause: blank keys translate to nothing.
    if (string.IsNullOrWhiteSpace(key))
    {
        return "";
    }

    try
    {
        return Localisation.Translate(key, args, true) ?? key;
    }
    catch
    {
        // Any translation failure falls back to the literal key.
        return key;
    }
}
|
||||
|
||||
static List<gaseous_server.Classes.Plugins.LogProviders.ILogProvider> _logProviders = new List<Plugins.LogProviders.ILogProvider>();
|
||||
static List<gaseous_server.Classes.Plugins.LogProviders.ILogProvider> _logReaderProviders = new List<Plugins.LogProviders.ILogProvider>();
|
||||
static bool _providersInitialized = false;
|
||||
|
||||
/// <summary>
/// Static constructor to initialize log providers once at startup.
/// Performs reflection-based discovery of all available log providers.
/// </summary>
static Logging()
{
    // Provider discovery runs exactly once per process, before any log call can execute.
    InitializeLogProviders();
}
|
||||
|
||||
/// <summary>
/// Initializes log providers by discovering all classes implementing ILogProvider.
/// Filters by supported operating systems and capabilities. Idempotent: subsequent
/// calls are no-ops once initialization has been attempted.
/// </summary>
static void InitializeLogProviders()
{
    if (_providersInitialized)
    {
        return;
    }

    try
    {
        var contract = typeof(Plugins.LogProviders.ILogProvider);

        // Concrete, instantiable implementations of the provider contract in this assembly.
        var candidates = Assembly.GetExecutingAssembly()
            .GetTypes()
            .Where(t => t.IsClass && !t.IsAbstract && contract.IsAssignableFrom(t))
            .ToList();

        foreach (var candidate in candidates)
        {
            try
            {
                // Each candidate is constructed and filtered independently so one
                // broken provider cannot prevent the rest from loading.
                if (Activator.CreateInstance(candidate) is Plugins.LogProviders.ILogProvider provider
                    && provider.SupportedOperatingSystems.Contains(Plugins.PluginManagement.GetCurrentOperatingSystem()))
                {
                    _logProviders.Add(provider);

                    if (provider.SupportsLogFetch)
                    {
                        _logReaderProviders.Add(provider);
                    }
                }
            }
            catch
            {
                // log provider failed to load - skip
            }
        }
    }
    finally
    {
        // Mark initialization attempted even on failure so we never retry in a loop.
        _providersInitialized = true;
    }
}
|
||||
|
||||
/// <summary>
/// Retrieves a context value from CallContext with safe fallback.
/// </summary>
/// <param name="key">The context key to retrieve.</param>
/// <returns>The context value or empty string if not found or error occurs.</returns>
static string GetContextValue(string key)
{
    try
    {
        // Null data (key absent) collapses to an empty string.
        return CallContext.GetData(key)?.ToString() ?? "";
    }
    catch
    {
        // Context access must never break logging itself.
        return "";
    }
}
|
||||
|
||||
/// <summary>
/// Handles the actual persistence / output of the log item to the configured sinks.
/// Applies filtering (e.g., debug on/off), enriches with ambient context, dispatches
/// to every discovered provider, and falls back to the disk sink on provider failure.
/// </summary>
/// <param name="logItem">The populated log item to write.</param>
/// <param name="LogToDiskOnly">Overrides sink selection to force disk-only behavior.</param>
static Task WriteLogAsync(LogItem logItem, bool LogToDiskOnly)
{
    // Debug entries are suppressed entirely unless debug logging is enabled.
    if (logItem.EventType == LogType.Debug && !Config.LoggingConfiguration.DebugLogging)
    {
        return Task.CompletedTask;
    }

    if (WriteToDiskOnly || LogToDiskOnly)
    {
        var diskLogProvider = new Plugins.LogProviders.TextFileProvider();
        _ = diskLogProvider.LogMessage(logItem, null);
        var consoleLogProvider = new Plugins.LogProviders.ConsoleProvider();
        _ = consoleLogProvider.LogMessage(logItem, null);
    }
    else
    {
        // Pull ambient context values if they have been set for correlation / tracing.
        logItem.CorrelationId = GetContextValue("CorrelationId");
        logItem.CallingProcess = GetContextValue("CallingProcess");
        logItem.CallingUser = GetContextValue("CallingUser");

        // send log to each provider
        foreach (var provider in _logProviders)
        {
            try
            {
                _ = provider.LogMessage(logItem, null);
            }
            catch
            {
                // BUG FIX: this catch previously claimed "write to disk log as
                // fallback" but did nothing, silently dropping the entry.
                // Actually attempt the disk fallback now.
                try
                {
                    _ = new Plugins.LogProviders.TextFileProvider().LogMessage(logItem, null);
                }
                catch
                {
                    // the disk fallback itself failed - nothing further we can do
                }
            }
        }
    }

    // IDIOM FIX: the method was declared `async` with no awaits (compiler
    // warning CS1998); returning a completed Task keeps the signature
    // Task-returning without the needless state machine.
    return Task.CompletedTask;
}
|
||||
|
||||
/// <summary>
/// Retrieves log entries from providers that support log fetching, applying pagination and optional filtering criteria
/// (event types, time range, full-text search on message, correlation/user/process constraints).
/// </summary>
/// <param name="model">Query model specifying filters and paging.</param>
/// <returns>List of matching <see cref="LogItem"/> instances from all reader-capable providers.</returns>
public static async Task<List<LogItem>> GetLogs(LogsViewModel model)
{
    var results = new List<LogItem>();

    if (_logReaderProviders.Count == 0)
    {
        return results;
    }

    foreach (var reader in _logReaderProviders)
    {
        try
        {
            // Each reader contributes its own matches; a failing reader is skipped.
            results.AddRange(await reader.GetLogMessages(model));
        }
        catch
        {
            // log provider failed to fetch logs - skip
        }
    }

    return results;
}
|
||||
|
||||
/// <summary>
/// Runs maintenance tasks for all log providers, such as purging aged logs.
/// A failure in one provider does not stop maintenance of the others.
/// </summary>
public async static Task RunMaintenance()
{
    foreach (var sink in _logProviders)
    {
        try
        {
            await sink.RunMaintenance();
        }
        catch
        {
            // log provider failed to run maintenance - skip
        }
    }
}
|
||||
|
||||
/// <summary>
/// Shuts down all log providers, performing any necessary cleanup.
/// Each provider is shut down independently; one failing shutdown does not block the rest.
/// </summary>
public static void ShutdownLogProviders()
{
    foreach (var sink in _logProviders)
    {
        try
        {
            sink.Shutdown();
        }
        catch
        {
            // log provider failed to shutdown - skip
        }
    }
}
|
||||
|
||||
/// <summary>
/// Classification / severity of a log entry.
/// NOTE: the numeric values are explicit; they appear to be relied upon by
/// persistence/serialization — do not renumber without checking stored data.
/// </summary>
public enum LogType
{
    /// <summary>
    /// Standard informational event representing normal application flow.
    /// </summary>
    Information = 0,
    /// <summary>
    /// Verbose diagnostic message intended to aid debugging; suppressed by
    /// WriteLogAsync unless debug logging is enabled in configuration.
    /// </summary>
    Debug = 1,
    /// <summary>
    /// Non-critical issue that may need attention but does not stop execution.
    /// </summary>
    Warning = 2,
    /// <summary>
    /// Critical failure or exception requiring immediate attention.
    /// </summary>
    Critical = 3
}
|
||||
|
||||
/// <summary>
/// Represents a single log entry including metadata, message content, optional exception, and contextual identifiers.
/// </summary>
public class LogItem
{
    /// <summary>
    /// Database identity / primary key (if sourced from persistence layer).
    /// </summary>
    public long Id { get; set; }

    /// <summary>
    /// UTC timestamp indicating when the event occurred / was captured.
    /// </summary>
    public DateTime EventTime { get; set; }

    /// <summary>
    /// Severity / classification of the log. Nullable to handle transitional deserialization scenarios.
    /// </summary>
    public LogType? EventType { get; set; }

    private LogTypeInfo? _cachedEventTypeInfo;

    /// <summary>
    /// Provides detailed information about the event type, including its string representation and console color.
    /// Cached to avoid creating new instances on repeated access; the cache is rebuilt
    /// automatically if <see cref="EventType"/> changes after first access.
    /// </summary>
    [Newtonsoft.Json.JsonIgnore]
    [System.Text.Json.Serialization.JsonIgnore]
    public LogTypeInfo EventTypeInfo
    {
        get
        {
            // BUG FIX: the cached instance was never invalidated, so mutating
            // EventType after the first read returned stale type info (wrong
            // TypeString/Colour). Rebuild when the effective type no longer
            // matches the cached one.
            LogType effectiveType = EventType ?? LogType.Information;
            if (_cachedEventTypeInfo == null || _cachedEventTypeInfo.Type != effectiveType)
            {
                _cachedEventTypeInfo = new LogTypeInfo(effectiveType);
            }
            return _cachedEventTypeInfo;
        }
    }

    /// <summary>
    /// Contains information about the log type, including its severity, string representation, and console color.
    /// Uses static lookup dictionaries for efficient O(1) access to type mappings.
    /// </summary>
    public class LogTypeInfo
    {
        // Static lookup tables for O(1) access instead of switch statements
        private static readonly Dictionary<LogType, string> TypeStringMap = new Dictionary<LogType, string>
        {
            { LogType.Information, "INFO" },
            { LogType.Warning, "WARN" },
            { LogType.Critical, "CRIT" },
            { LogType.Debug, "DBUG" }
        };

        private static readonly Dictionary<LogType, ConsoleColor> ColourMap = new Dictionary<LogType, ConsoleColor>
        {
            { LogType.Information, ConsoleColor.Blue },
            { LogType.Warning, ConsoleColor.Yellow },
            { LogType.Critical, ConsoleColor.Red },
            { LogType.Debug, ConsoleColor.Gray }
        };

        private static readonly Dictionary<LogType, string> ColourEscapeMap = new Dictionary<LogType, string>
        {
            { LogType.Information, "\u001b[34m" }, // Blue
            { LogType.Warning, "\u001b[33m" }, // Yellow
            { LogType.Critical, "\u001b[31m" }, // Red
            { LogType.Debug, "\u001b[37m" } // Gray/White
        };

        /// <summary>
        /// Initializes a new instance of the <see cref="LogTypeInfo"/> class with the specified log type.
        /// </summary>
        /// <param name="type">The log type to associate with this info instance.</param>
        public LogTypeInfo(LogType type)
        {
            this._type = type;
        }

        private LogType _type { get; set; }

        /// <summary>
        /// The type of log entry.
        /// </summary>
        public LogType Type { get { return this._type; } }

        /// <summary>
        /// String representation of the log type for easy logging/display (e.g., "INFO", "WARN", "CRIT", "DBUG").
        /// Retrieved from static lookup dictionary for optimal performance.
        /// </summary>
        public string TypeString
        {
            get => TypeStringMap.TryGetValue(_type, out var value) ? value : "INFO";
        }

        /// <summary>
        /// The console color associated with the log type for colorized output.
        /// Retrieved from static lookup dictionary for optimal performance.
        /// </summary>
        public ConsoleColor Colour
        {
            get => ColourMap.TryGetValue(_type, out var value) ? value : ConsoleColor.Blue;
        }

        /// <summary>
        /// The ANSI escape code for the log type color for colorized console output.
        /// Retrieved from static lookup dictionary for optimal performance.
        /// </summary>
        public string ColourEscape
        {
            get => ColourEscapeMap.TryGetValue(_type, out var value) ? value : "\u001b[34m";
        }

        /// <summary>
        /// The ANSI escape code to reset console color to default.
        /// </summary>
        public string DefaultConsoleColourEscape => "\u001b[0m";
    }

    /// <summary>
    /// Logical process or component emitting the log (e.g., controller name, worker identifier).
    /// </summary>
    public string Process { get; set; } = "";

    /// <summary>
    /// Ambient correlation identifier used to link related operations across components.
    /// </summary>
    public string CorrelationId { get; set; } = "";

    /// <summary>
    /// Name of the calling process (if supplied via context) for cross-system traceability.
    /// </summary>
    public string? CallingProcess { get; set; } = "";

    /// <summary>
    /// Identifier / email of the user associated with the log (if available).
    /// </summary>
    public string? CallingUser { get; set; } = "";

    /// <summary>
    /// Message body of the log entry.
    /// </summary>
    public string Message { get; set; } = "";

    /// <summary>
    /// Raw exception details captured for the event, usually stack trace and message.
    /// </summary>
    public string? ExceptionValue { get; set; } = "";

    /// <summary>
    /// Arbitrary additional structured data associated with the log (serialized when persisted).
    /// </summary>
    public Dictionary<string, object> AdditionalData { get; set; } = new Dictionary<string, object>();
}
|
||||
|
||||
/// <summary>
/// Query model for retrieving logs with paging and filter criteria.
/// All filter properties are optional; a null/empty filter means "no constraint".
/// </summary>
public class LogsViewModel
{
    /// <summary>
    /// Upper bound (exclusive) on log Id for incremental pagination (e.g., infinite scroll).
    /// </summary>
    public long? StartIndex { get; set; }
    /// <summary>
    /// 1-based page index.
    /// </summary>
    public int PageNumber { get; set; } = 1;
    /// <summary>
    /// Number of records to return per page.
    /// </summary>
    public int PageSize { get; set; } = 100;
    /// <summary>
    /// Set of log types to include; empty set means all types.
    /// </summary>
    public List<LogType> Status { get; set; } = new List<LogType>();
    /// <summary>
    /// Optional inclusive start of time range filter.
    /// </summary>
    public DateTime? StartDateTime { get; set; }
    /// <summary>
    /// Optional inclusive end of time range filter.
    /// </summary>
    public DateTime? EndDateTime { get; set; }
    /// <summary>
    /// Full-text search term applied to message field.
    /// </summary>
    public string? SearchText { get; set; }
    /// <summary>
    /// Filter by exact correlation identifier.
    /// </summary>
    public string? CorrelationId { get; set; }
    /// <summary>
    /// Filter by calling process.
    /// </summary>
    public string? CallingProcess { get; set; }
    /// <summary>
    /// Filter by calling user (email/identifier).
    /// </summary>
    public string? CallingUser { get; set; }
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1,23 +1,25 @@
|
|||
using System;
|
||||
using System.Data;
|
||||
using System.Threading.Tasks;
|
||||
using gaseous_server.Models;
|
||||
using Microsoft.VisualStudio.Web.CodeGeneration;
|
||||
|
||||
namespace gaseous_server.Classes
|
||||
{
|
||||
public class Maintenance : QueueItemStatus
|
||||
public class Maintenance : QueueItemStatus
|
||||
{
|
||||
const int MaxFileAge = 30;
|
||||
|
||||
public void RunDailyMaintenance()
|
||||
public async Task RunDailyMaintenance()
|
||||
{
|
||||
Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
|
||||
string sql = "";
|
||||
Dictionary<string, object> dbDict = new Dictionary<string, object>();
|
||||
|
||||
// remove any entries from the library that have an invalid id
|
||||
Logging.LogKey(Logging.LogType.Information, "process.maintenance", "maintenance.removing_invalid_library_entries");
|
||||
string LibraryWhereClause = "";
|
||||
foreach (GameLibrary.LibraryItem library in GameLibrary.GetLibraries)
|
||||
foreach (GameLibrary.LibraryItem library in await GameLibrary.GetLibraries())
|
||||
{
|
||||
if (LibraryWhereClause.Length > 0)
|
||||
{
|
||||
|
|
@ -28,14 +30,20 @@ namespace gaseous_server.Classes
|
|||
string sqlLibraryWhereClause = "";
|
||||
if (LibraryWhereClause.Length > 0)
|
||||
{
|
||||
DataTable affectedMetadataMaps = await db.ExecuteCMDAsync("SELECT DISTINCT MetadataMapId FROM Games_Roms WHERE LibraryId NOT IN ( " + LibraryWhereClause + " ) AND MetadataMapId IS NOT NULL;");
|
||||
sqlLibraryWhereClause = "DELETE FROM Games_Roms WHERE LibraryId NOT IN ( " + LibraryWhereClause + " );";
|
||||
db.ExecuteCMD(sqlLibraryWhereClause);
|
||||
await db.ExecuteCMDAsync(sqlLibraryWhereClause);
|
||||
MetadataManagement.UpdateRomCounts(affectedMetadataMaps.AsEnumerable().Where(row => row["MetadataMapId"] != DBNull.Value).Select(row => (long)row["MetadataMapId"]));
|
||||
}
|
||||
|
||||
// delete old logs
|
||||
sql = "DELETE FROM ServerLogs WHERE EventTime < @EventRetentionDate;";
|
||||
dbDict.Add("EventRetentionDate", DateTime.UtcNow.AddDays(Config.LoggingConfiguration.LogRetention * -1));
|
||||
db.ExecuteCMD(sql, dbDict);
|
||||
// update rom counts
|
||||
Logging.LogKey(Logging.LogType.Information, "process.maintenance", "maintenance.updating_rom_counts");
|
||||
MetadataManagement metadataGame = new MetadataManagement(this);
|
||||
metadataGame.UpdateRomCounts();
|
||||
|
||||
// run log maintenance
|
||||
Logging.LogKey(Logging.LogType.Information, "process.maintenance", "maintenance.running_log_maintenance");
|
||||
await Logging.RunMaintenance();
|
||||
|
||||
// delete files and directories older than 7 days in PathsToClean
|
||||
List<string> PathsToClean = new List<string>();
|
||||
|
|
@ -44,7 +52,7 @@ namespace gaseous_server.Classes
|
|||
|
||||
foreach (string PathToClean in PathsToClean)
|
||||
{
|
||||
Logging.Log(Logging.LogType.Information, "Maintenance", "Removing files older than " + MaxFileAge + " days from " + PathToClean);
|
||||
Logging.LogKey(Logging.LogType.Information, "process.maintenance", "maintenance.removing_files_older_than_days_from_path", null, new string[] { MaxFileAge.ToString(), PathToClean });
|
||||
|
||||
// get content
|
||||
// files first
|
||||
|
|
@ -53,7 +61,7 @@ namespace gaseous_server.Classes
|
|||
FileInfo fileInfo = new FileInfo(filePath);
|
||||
if (fileInfo.LastWriteTimeUtc.AddDays(MaxFileAge) < DateTime.UtcNow)
|
||||
{
|
||||
Logging.Log(Logging.LogType.Warning, "Maintenance", "Deleting file " + filePath);
|
||||
Logging.LogKey(Logging.LogType.Warning, "process.maintenance", "maintenance.deleting_file", null, new string[] { filePath });
|
||||
File.Delete(filePath);
|
||||
}
|
||||
}
|
||||
|
|
@ -64,30 +72,30 @@ namespace gaseous_server.Classes
|
|||
DirectoryInfo directoryInfo = new DirectoryInfo(dirPath);
|
||||
if (directoryInfo.LastWriteTimeUtc.AddDays(MaxFileAge) < DateTime.UtcNow)
|
||||
{
|
||||
Logging.Log(Logging.LogType.Warning, "Maintenance", "Deleting directory " + directoryInfo);
|
||||
Logging.LogKey(Logging.LogType.Warning, "process.maintenance", "maintenance.deleting_directory", null, new string[] { directoryInfo.ToString() });
|
||||
Directory.Delete(dirPath, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void RunWeeklyMaintenance()
|
||||
public async Task RunWeeklyMaintenance()
|
||||
{
|
||||
Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);
|
||||
string sql = "";
|
||||
Dictionary<string, object> dbDict = new Dictionary<string, object>();
|
||||
|
||||
Logging.Log(Logging.LogType.Information, "Maintenance", "Optimising database tables");
|
||||
Logging.LogKey(Logging.LogType.Information, "process.maintenance", "maintenance.optimising_database_tables");
|
||||
sql = "SHOW FULL TABLES WHERE Table_Type = 'BASE TABLE';";
|
||||
DataTable tables = db.ExecuteCMD(sql);
|
||||
DataTable tables = await db.ExecuteCMDAsync(sql);
|
||||
|
||||
int StatusCounter = 1;
|
||||
foreach (DataRow row in tables.Rows)
|
||||
{
|
||||
SetStatus(StatusCounter, tables.Rows.Count, "Optimising table " + row[0].ToString());
|
||||
|
||||
sql = "OPTIMIZE TABLE " + row[0].ToString();
|
||||
DataTable response = db.ExecuteCMD(sql);
|
||||
sql = "OPTIMIZE TABLE `" + row[0].ToString() + "`;";
|
||||
DataTable response = await db.ExecuteCMDAsync(sql, new Dictionary<string, object>(), 240);
|
||||
foreach (DataRow responseRow in response.Rows)
|
||||
{
|
||||
string retVal = "";
|
||||
|
|
@ -95,7 +103,7 @@ namespace gaseous_server.Classes
|
|||
{
|
||||
retVal += responseRow.ItemArray[i] + "; ";
|
||||
}
|
||||
Logging.Log(Logging.LogType.Information, "Maintenance", "(" + StatusCounter + "/" + tables.Rows.Count + "): Optimise table " + row[0].ToString() + ": " + retVal);
|
||||
Logging.LogKey(Logging.LogType.Information, "process.maintenance", "maintenance.optimise_table_status", null, new string[] { StatusCounter.ToString(), tables.Rows.Count.ToString(), row[0].ToString(), retVal });
|
||||
}
|
||||
|
||||
StatusCounter += 1;
|
||||
425
gaseous-lib/Classes/Metadata/AgeGroups.cs
Normal file
425
gaseous-lib/Classes/Metadata/AgeGroups.cs
Normal file
|
|
@ -0,0 +1,425 @@
|
|||
using System;
|
||||
using System.Reflection;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Threading.Tasks;
|
||||
using gaseous_server.Models;
|
||||
using gaseous_server.Classes.Plugins.MetadataProviders.MetadataTypes;
|
||||
using Microsoft.CodeAnalysis.Classification;
|
||||
using gaseous_server.Classes.Plugins.MetadataProviders;
|
||||
|
||||
namespace gaseous_server.Classes.Metadata
|
||||
{
|
||||
public class AgeGroups
|
||||
{
|
||||
public AgeGroups()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
/// <summary>
/// Returns the cached age group for a game, computing and caching it when the
/// cache entry is missing or expired.
/// </summary>
/// <param name="game">The game to resolve the age group for; null yields null.</param>
/// <returns>The resolved <see cref="AgeGroup"/>, or null for a null game.</returns>
public static async Task<AgeGroup?> GetAgeGroup(Game? game)
{
    if (game == null)
    {
        return null;
    }

    // Local renamed from "Storage" to avoid shadowing the Storage type name.
    var storage = new gaseous_server.Classes.Plugins.MetadataProviders.Storage(game.MetadataSource);

    Storage.CacheStatus? cacheStatus = await storage.GetCacheStatusAsync("AgeGroup", (long)game.Id);

    AgeGroup? result = new AgeGroup();

    switch (cacheStatus)
    {
        case Storage.CacheStatus.NotPresent:
        case Storage.CacheStatus.Expired:
            // Recompute from the game's age ratings and refresh the cache.
            result = await _GetAgeGroup(game);
            if (result != null)
            {
                await storage.StoreCacheValue<AgeGroup>(result);
            }
            break;

        case Storage.CacheStatus.Current:
            result = await storage.GetCacheValue<AgeGroup>(result, "Id", game.Id);
            break;

        default:
            throw new InvalidOperationException("Unexpected cache status encountered in GetAgeGroup.");
    }

    return result;
}
|
||||
|
||||
/// <summary>
/// Compiles the maximum (most restrictive) age group for the given game from its
/// age ratings. Returns an AgeGroup with a null AgeGroupId when the game has no
/// ratings or the ratings resolve to Unclassified; returns null for a null game.
/// </summary>
/// <param name="game">The game whose ratings are evaluated.</param>
/// <returns>The compiled <see cref="AgeGroup"/>, or null when <paramref name="game"/> is null.</returns>
public static async Task<AgeGroup?> _GetAgeGroup(Game game)
{
    // DEAD-CODE FIX: the original nested an identical `if (game.AgeRatings != null)`
    // check inside itself, making one else-branch unreachable, and constructed the
    // "no group" AgeGroup in three separate places. The behaviour is unchanged:
    // null game -> null; no ratings or Unclassified -> AgeGroupId == null.
    if (game == null)
    {
        return null;
    }

    AgeGroup ageGroup = new AgeGroup();
    ageGroup.Id = game.Id;
    ageGroup.GameId = game.Id;
    ageGroup.AgeGroupId = null;

    if (game.AgeRatings != null)
    {
        // collect ratings values from metadata
        List<AgeRating> ageRatings = new List<AgeRating>();
        foreach (long ratingId in game.AgeRatings)
        {
            AgeRating? rating = await AgeRatings.GetAgeRating(game.MetadataSource, ratingId);
            if (rating != null)
            {
                ageRatings.Add(rating);
            }
        }

        // compile the ratings values into the ratings groups
        AgeRestrictionGroupings highestAgeGroup = GetAgeGroupFromAgeRatings(ageRatings);

        // 0 (Unclassified) maps to a null group id, matching the original logic.
        if (highestAgeGroup != 0)
        {
            ageGroup.AgeGroupId = highestAgeGroup;
        }
    }

    return ageGroup;
}
|
||||
|
||||
/// <summary>
/// Determines the highest age restriction grouping from a list of age ratings.
/// </summary>
/// <param name="ageRatings">The list of age ratings to evaluate.</param>
/// <returns>The highest <see cref="AgeRestrictionGroupings"/> found in the ratings.</returns>
public static AgeRestrictionGroupings GetAgeGroupFromAgeRatings(List<AgeRating> ageRatings)
{
    AgeRestrictionGroupings highest = AgeRestrictionGroupings.Unclassified;

    foreach (AgeRating rating in ageRatings)
    {
        var (boardName, ratingName) = GetBoardAndRatingNames(rating);

        // Ratings that cannot be resolved to a known board/rating are ignored.
        if (boardName == null || ratingName == null)
        {
            continue;
        }

        AgeRestrictionGroupings? candidate = GetMatchingAgeGroup(boardName, ratingName);
        if (candidate.HasValue && candidate.Value > highest)
        {
            highest = candidate.Value;
        }
    }

    return highest;
}
|
||||
|
||||
/// <summary>
/// Resolves the ratings board name and rating name for an age rating by matching
/// its IGDB organization and category ids against <see cref="AgeGroupMap"/>.
/// </summary>
/// <param name="ageRating">The age rating to resolve.</param>
/// <returns>The board and rating names, or (null, null) when either cannot be matched.</returns>
private static (string? ratingsBoardName, string? boardRatingName) GetBoardAndRatingNames(AgeRating ageRating)
{
    long boardId = ageRating.Organization;
    long categoryId = ageRating.RatingCategory;

    // locate the ratings board whose IGDB id matches the rating's organization
    var board = AgeGroupMap.RatingBoards
        .FirstOrDefault(b => b.Value.IGDBId == boardId);
    if (string.IsNullOrEmpty(board.Key))
    {
        return (null, null);
    }

    // locate the rating within that board whose IGDB id matches the rating category
    var rating = board.Value.Ratings
        .FirstOrDefault(r => r.Value.IGDBId == categoryId);
    if (string.IsNullOrEmpty(rating.Key))
    {
        return (null, null);
    }

    return (board.Key, rating.Key);
}
|
||||
|
||||
/// <summary>
/// Finds the age restriction grouping whose rating map contains the given
/// board/rating name combination.
/// </summary>
/// <param name="ratingsBoardName">The name of the classification board.</param>
/// <param name="boardRatingName">The rating name within that board.</param>
/// <returns>The matching grouping, or null when no group maps this rating.</returns>
private static AgeRestrictionGroupings? GetMatchingAgeGroup(string ratingsBoardName, string boardRatingName)
{
    foreach (var ageGroup in AgeGroupMap.AgeGroups)
    {
        // TryGetValue avoids the double dictionary lookup of ContainsKey + indexer
        if (ageGroup.Value.Ratings.TryGetValue(ratingsBoardName, out var boardRatings) &&
            boardRatings.Contains(boardRatingName))
        {
            return ageGroup.Key;
        }
    }
    return null;
}
|
||||
|
||||
/// <summary>
/// Builds a dictionary describing all known classification boards, their age
/// ratings, and the age group each rating maps to.
/// </summary>
/// <returns>
/// A dictionary with three entries: "ClassificationBoards" (IGDB board id to board
/// name), "AgeRatings" (IGDB rating id to IGDB rating name), and "AgeGroups"
/// (group name to board name to list of IGDB rating ids).
/// </returns>
public static Dictionary<string, object> GetAgeRatingAndGroupings()
{
    // get age ratings dictionary
    Dictionary<long, string> ClassificationBoardsStrings = new Dictionary<long, string>();
    Dictionary<long, string> AgeRatingsStrings = new Dictionary<long, string>();
    foreach (var boardEntry in AgeGroups.AgeGroupMap.RatingBoards)
    {
        // hoist the board model instead of re-indexing the map on every access
        var board = boardEntry.Value;
        ClassificationBoardsStrings.Add((long)board.IGDBId, boardEntry.Key);

        foreach (var ratingEntry in board.Ratings)
        {
            AgeRatingsStrings.Add((long)ratingEntry.Value.IGDBId, ratingEntry.Value.IGDBName);
        }
    }

    Dictionary<string, Dictionary<string, List<long>>> AgeGroupsStrings = new Dictionary<string, Dictionary<string, List<long>>>();
    foreach (var ageGroupEntry in AgeGroups.AgeGroupMap.AgeGroups)
    {
        Dictionary<string, List<long>> ageGroupRatings = new Dictionary<string, List<long>>();
        foreach (var groupBoardEntry in ageGroupEntry.Value.Ratings)
        {
            // the board-existence check is loop-invariant, so it is hoisted out of
            // the per-rating loop (previously checked once per rating item)
            if (!AgeGroups.AgeGroupMap.RatingBoards.TryGetValue(groupBoardEntry.Key, out var board))
            {
                continue;
            }

            List<long> ageRatingIds = new List<long>();
            foreach (var ratingName in groupBoardEntry.Value)
            {
                // only ratings that resolve to a known rating item contribute an id
                if (board.Ratings.TryGetValue(ratingName, out var ratingItem))
                {
                    ageRatingIds.Add((long)ratingItem.IGDBId);
                }
            }

            if (ageRatingIds.Count > 0)
            {
                ageGroupRatings.Add(groupBoardEntry.Key, ageRatingIds);
            }
        }

        if (ageGroupRatings.Count > 0)
        {
            AgeGroupsStrings.Add(ageGroupEntry.Key.ToString(), ageGroupRatings);
        }
    }

    // create the final dictionary
    Dictionary<string, object> ageRatingAndGroupings = new Dictionary<string, object>
    {
        { "ClassificationBoards", ClassificationBoardsStrings },
        { "AgeRatings", AgeRatingsStrings },
        { "AgeGroups", AgeGroupsStrings }
    };

    return ageRatingAndGroupings;
}
|
||||
|
||||
/// <summary>
/// Represents an age group for a game, including its ID, associated game ID, and age restriction grouping.
/// </summary>
public class AgeGroup
{
    /// <summary>
    /// Gets or sets the unique identifier for the age group.
    /// </summary>
    public long? Id { get; set; }
    /// <summary>
    /// Gets or sets the unique identifier for the associated game.
    /// </summary>
    public long? GameId { get; set; }
    /// <summary>
    /// Gets or sets the age restriction grouping for the age group.
    /// A null value indicates the game is unclassified (no ratings resolved to a grouping).
    /// </summary>
    public AgeRestrictionGroupings? AgeGroupId { get; set; }
}
|
||||
|
||||
/// <summary>
/// Represents the possible age restriction groupings for games.
/// Numeric values are ordered by restriction severity (higher value = more
/// restrictive), so groupings can be compared with relational operators.
/// </summary>
public enum AgeRestrictionGroupings
{
    /// <summary>
    /// Represents games suitable only for adults.
    /// </summary>
    Adult = 4,
    /// <summary>
    /// Represents games suitable for mature audiences.
    /// </summary>
    Mature = 3,
    /// <summary>
    /// Represents games suitable for teenagers.
    /// </summary>
    Teen = 2,
    /// <summary>
    /// Represents games suitable for children.
    /// </summary>
    Child = 1,
    /// <summary>
    /// Represents games that are unclassified.
    /// </summary>
    Unclassified = 0
}
|
||||
|
||||
// Backing store for AgeGroupMap; populated lazily on first access.
private static AgeGroupMapModel? _ageGroupMap { get; set; } = null;
/// <summary>
/// Gets the age group map model, loading from file or embedded resource if necessary.
/// Load order: a user-supplied "AgeGroupMap.json" in the library root, then the
/// embedded resource, then an empty model. The result is never null.
/// </summary>
public static AgeGroupMapModel AgeGroupMap
{
    get
    {
        if (_ageGroupMap == null)
        {
            // prefer a user-supplied map in the library root over the embedded default
            string filePath = Path.Combine(Config.LibraryConfiguration.LibraryRootDirectory, "AgeGroupMap.json");
            if (File.Exists(filePath))
            {
                string json = File.ReadAllText(filePath);
                _ageGroupMap = Newtonsoft.Json.JsonConvert.DeserializeObject<AgeGroupMapModel>(json);
            }

            // BUG FIX: DeserializeObject can return null (e.g. a file containing
            // "null" or unparseable content), which previously left this non-nullable
            // property returning null. Fall back to the embedded resource instead.
            if (_ageGroupMap == null)
            {
                using Stream? stream = Assembly.GetExecutingAssembly().GetManifestResourceStream("gaseous_lib.Support.AgeGroupMap.json");
                if (stream != null)
                {
                    using StreamReader reader = new StreamReader(stream);
                    string json = reader.ReadToEnd();
                    _ageGroupMap = Newtonsoft.Json.JsonConvert.DeserializeObject<AgeGroupMapModel>(json, new Newtonsoft.Json.JsonSerializerSettings
                    {
                        NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
                        DefaultValueHandling = Newtonsoft.Json.DefaultValueHandling.Ignore,
                        MaxDepth = 10
                    });
                }
            }

            // guarantee a non-null result even when no map could be loaded or parsed
            _ageGroupMap ??= new AgeGroupMapModel();
        }

        return _ageGroupMap;
    }
}
|
||||
|
||||
/// <summary>
/// Represents the mapping model for age groups, including group definitions and ratings boards.
/// Deserialized from "AgeGroupMap.json" (on disk or embedded resource).
/// </summary>
public class AgeGroupMapModel
{
    /// <summary>
    /// Gets or sets the dictionary mapping age restriction groupings to their corresponding age group models.
    /// </summary>
    public Dictionary<AgeRestrictionGroupings, AgeGroupsModel> AgeGroups { get; set; } = new Dictionary<AgeRestrictionGroupings, AgeGroupsModel>();

    /// <summary>
    /// Gets or sets the dictionary of ratings boards, keyed by their names.
    /// </summary>
    public Dictionary<string, RatingBoardModel> RatingBoards { get; set; } = new Dictionary<string, RatingBoardModel>();

    /// <summary>
    /// Represents an age group model containing its ID and associated ratings.
    /// </summary>
    public class AgeGroupsModel
    {
        /// <summary>
        /// Gets or sets the unique identifier for the age group.
        /// </summary>
        public long Id { get; set; }

        /// <summary>
        /// Gets or sets the ratings associated with the age group, organized by ratings board.
        /// Keyed by ratings board name; each value is the list of rating names within that board.
        /// </summary>
        public Dictionary<string, List<string>> Ratings { get; set; } = new Dictionary<string, List<string>>();
    }

    /// <summary>
    /// Represents a single ratings board, including its identifying details and the ratings it defines.
    /// </summary>
    public class RatingBoardModel
    {
        /// <summary>
        /// Gets or sets the IGDB identifier for the ratings board item.
        /// </summary>
        public long? IGDBId { get; set; }
        /// <summary>
        /// Gets or sets the name of the ratings board item.
        /// </summary>
        public string? Name { get; set; }
        /// <summary>
        /// Gets or sets the short name of the ratings board item.
        /// </summary>
        public string? ShortName { get; set; }
        /// <summary>
        /// Gets or sets the description of the ratings board item.
        /// </summary>
        public string? Description { get; set; }
        /// <summary>
        /// Gets or sets the website URL for the ratings board item.
        /// </summary>
        public string? Website { get; set; }
        /// <summary>
        /// Gets or sets the dictionary of ratings for this ratings board item, keyed by rating name.
        /// </summary>
        public Dictionary<string, RatingsItemModel> Ratings { get; set; } = new Dictionary<string, RatingsItemModel>();

        /// <summary>
        /// Represents a rating item within a ratings board, including its IGDB ID, name, description, and icon name.
        /// </summary>
        public class RatingsItemModel
        {
            /// <summary>
            /// Gets or sets the IGDB identifier for the rating item.
            /// </summary>
            public long? IGDBId { get; set; }
            /// <summary>
            /// Gets or sets the IGDB name for the rating item.
            /// </summary>
            public string? IGDBName { get; set; }
            /// <summary>
            /// Gets or sets the name of the rating item.
            /// </summary>
            public string? Name { get; set; }
            /// <summary>
            /// Gets or sets the description of the rating item.
            /// </summary>
            public string? Description { get; set; }
            /// <summary>
            /// Gets or sets the icon name associated with the rating item.
            /// </summary>
            public string? IconName { get; set; }
        }
    }
}
|
||||
}
|
||||
}
|
||||
126
gaseous-lib/Classes/Metadata/AgeRating.cs
Normal file
126
gaseous-lib/Classes/Metadata/AgeRating.cs
Normal file
|
|
@ -0,0 +1,126 @@
|
|||
using System;
|
||||
using System.Buffers;
|
||||
using System.Reflection;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Threading.Tasks;
|
||||
using gaseous_server.Classes.Plugins.MetadataProviders.MetadataTypes;
|
||||
using Microsoft.CodeAnalysis.Classification;
|
||||
|
||||
namespace gaseous_server.Classes.Metadata
|
||||
{
|
||||
/// <summary>
/// Helpers for fetching age rating metadata and maintaining the ClassificationMap table.
/// </summary>
public class AgeRatings
{
    public AgeRatings()
    {
    }

    /// <summary>
    /// Fetches a single age rating record from the metadata store.
    /// </summary>
    /// <param name="SourceType">The metadata source to query.</param>
    /// <param name="Id">The id of the age rating; null or 0 yields null.</param>
    /// <returns>The age rating, or null when no usable id was supplied.</returns>
    public static async Task<AgeRating?> GetAgeRating(FileSignature.MetadataSources SourceType, long? Id)
    {
        // an absent or zero id can never resolve to a record
        if (Id == null || Id == 0)
        {
            return null;
        }

        return await Metadata.GetMetadataAsync<AgeRating>(SourceType, (long)Id, false);
    }

    /// <summary>
    /// Builds a consolidated view of an age rating: the rating's id, its
    /// classification board, its rating title, and any content descriptors.
    /// </summary>
    /// <param name="SourceType">The metadata source to query.</param>
    /// <param name="RatingId">The id of the age rating to consolidate.</param>
    /// <returns>The consolidated rating, or null when the rating cannot be found.</returns>
    public static async Task<GameAgeRating?> GetConsolidatedAgeRating(FileSignature.MetadataSources SourceType, long RatingId)
    {
        // declared nullable: GetAgeRating returns null for unknown/zero ids
        // (previously assigned to a non-nullable local and returned null from a
        // non-nullable Task<GameAgeRating>)
        AgeRating? ageRating = await GetAgeRating(SourceType, RatingId);
        if (ageRating == null)
        {
            return null;
        }

        GameAgeRating gameAgeRating = new GameAgeRating();
        gameAgeRating.Id = (long)ageRating.Id;
        gameAgeRating.RatingBoard = await AgeRatingOrganizations.GetAgeRatingOrganization(SourceType, ageRating.Organization);
        gameAgeRating.RatingTitle = await AgeRatingCategorys.GetAgeRatingCategory(SourceType, ageRating.RatingCategory);

        List<AgeRatingContentDescription> descriptions = new List<AgeRatingContentDescription>();
        if (ageRating.RatingContentDescriptions != null)
        {
            foreach (long ContentId in ageRating.RatingContentDescriptions)
            {
                try
                {
                    AgeRatingContentDescription ageRatingContentDescription = await AgeRatingContentDescriptions.GetAgeRatingContentDescriptions(SourceType, ContentId);
                    // skip descriptors that could not be resolved rather than storing null
                    if (ageRatingContentDescription != null)
                    {
                        descriptions.Add(ageRatingContentDescription);
                    }
                }
                catch (Exception ex)
                {
                    // a single bad descriptor should not abort the whole consolidation
                    Console.WriteLine(ex.Message);
                }
            }
        }
        gameAgeRating.Descriptions = descriptions.ToArray();

        return gameAgeRating;
    }

    /// <summary>
    /// Consolidated age rating details for a game.
    /// </summary>
    public class GameAgeRating
    {
        // id of the underlying age rating record
        public long Id { get; set; }
        // the classification board (organization) that issued the rating
        public AgeRatingOrganization RatingBoard { get; set; }
        // the rating title/category within the board
        public AgeRatingCategory RatingTitle { get; set; }
        // content descriptors attached to the rating
        public AgeRatingContentDescription[] Descriptions { get; set; }
    }

    /// <summary>
    /// Rebuilds the ClassificationMap database table from the static age group map:
    /// clears the table, then inserts one row per (age group, board, rating) triple.
    /// </summary>
    public static async Task PopulateAgeMapAsync()
    {
        Database db = new Database(Database.databaseType.MySql, Config.DatabaseConfiguration.ConnectionString);

        // start from a clean table
        db.ExecuteNonQuery("DELETE FROM ClassificationMap;");

        Dictionary<string, object> dbDict = new Dictionary<string, object>();

        // loop all AgeRestrictionGroupings enums (Adult, Teen, etc.)
        foreach (AgeGroups.AgeRestrictionGroupings ageRestrictionGroup in Enum.GetValues(typeof(AgeGroups.AgeRestrictionGroupings)))
        {
            // Unclassified has no rows in the map
            if (ageRestrictionGroup == AgeGroups.AgeRestrictionGroupings.Unclassified)
            {
                continue;
            }

            int ageRestrictionGroupValue = (int)ageRestrictionGroup;

            // loop all rating boards mapped to this age group
            foreach (var ratingBoard in AgeGroups.AgeGroupMap.AgeGroups[ageRestrictionGroup].Ratings.Keys)
            {
                // TryGetValue avoids the ContainsKey + indexer double lookup
                if (!AgeGroups.AgeGroupMap.RatingBoards.TryGetValue(ratingBoard, out var ratingBoardItem))
                {
                    continue;
                }

                long ratingBoardId = (long)ratingBoardItem.IGDBId;

                // loop all ratings for this rating board within the age group
                foreach (var rating in AgeGroups.AgeGroupMap.AgeGroups[ageRestrictionGroup].Ratings[ratingBoard])
                {
                    if (!ratingBoardItem.Ratings.TryGetValue(rating, out var ratingItem))
                    {
                        continue;
                    }

                    long ratingId = (long)ratingItem.IGDBId;

                    // insert into ClassificationMap
                    string sql = "INSERT INTO ClassificationMap (AgeGroupId, ClassificationBoardId, RatingId) VALUES (@ageGroupId, @classificationBoardId, @ratingId);";
                    dbDict.Clear();
                    dbDict.Add("@ageGroupId", ageRestrictionGroupValue);
                    dbDict.Add("@classificationBoardId", ratingBoardId);
                    dbDict.Add("@ratingId", ratingId);

                    try
                    {
                        db.ExecuteNonQuery(sql, dbDict);
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine($"Error inserting into ClassificationMap: {ex.Message}");
                    }
                }
            }
        }

        // NOTE(review): this method performs no awaits (db calls appear synchronous);
        // kept async Task to preserve the public signature — confirm whether the
        // Database API offers async variants that should be used here.
        await Task.CompletedTask;
    }
}
|
||||
}
|
||||
|
||||
27
gaseous-lib/Classes/Metadata/AgeRatingCategory.cs
Normal file
27
gaseous-lib/Classes/Metadata/AgeRatingCategory.cs
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
using System;
|
||||
using System.Threading.Tasks;
|
||||
using gaseous_server.Classes.Plugins.MetadataProviders.MetadataTypes;
|
||||
|
||||
|
||||
namespace gaseous_server.Classes.Metadata
|
||||
{
|
||||
/// <summary>
/// Helper for fetching age rating category metadata.
/// </summary>
public class AgeRatingCategorys
{
    public AgeRatingCategorys()
    {
    }

    /// <summary>
    /// Fetches a single age rating category from the metadata store.
    /// </summary>
    /// <param name="SourceType">The metadata source to query.</param>
    /// <param name="Id">The id of the category; null or 0 yields null.</param>
    /// <returns>The category, or null when no usable id was supplied.</returns>
    public static async Task<AgeRatingCategory?> GetAgeRatingCategory(FileSignature.MetadataSources SourceType, long? Id)
    {
        // an absent or zero id can never resolve to a record
        if (Id == null || Id == 0)
        {
            return null;
        }

        return await Metadata.GetMetadataAsync<AgeRatingCategory>(SourceType, (long)Id, false);
    }
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue