diff --git a/assets/css/openfusion.css b/assets/css/openfusion.css
index 667b96b..c8bc8d8 100644
--- a/assets/css/openfusion.css
+++ b/assets/css/openfusion.css
@@ -148,6 +148,14 @@ select {
     color: #fff;
 }
 
+.modal {
+    overflow-y: auto;
+}
+
+.invalidinput {
+    border-color: #ff0000;
+}
+
 button:disabled {
     cursor: not-allowed;
     pointer-events: all !important;
diff --git a/assets/js/server-selector.js b/assets/js/server-selector.js
index 074e61f..6ca490e 100644
--- a/assets/js/server-selector.js
+++ b/assets/js/server-selector.js
@@ -83,6 +83,36 @@ function setAppVersionText() {
     $("#of-versionnumber").text("v" + getAppVersion());
 }
 
+function validateServerSave(modalName) {
+    // runs every time a key is entered into the server save form
+    var descInput = document.getElementById(modalName + "server-descinput");
+    var ipInput = document.getElementById(modalName + "server-ipinput");
+    var button = document.getElementById(modalName + "server-savebutton");
+    var valid = true;
+
+    descInput.classList.remove("invalidinput");
+    ipInput.classList.remove("invalidinput");
+
+    if (
+        descInput.value.length < parseInt(descInput.getAttribute("minlength")) ||
+        descInput.value.length > parseInt(descInput.getAttribute("maxlength"))
+    ) {
+        descInput.classList.add("invalidinput");
+        valid = false;
+    }
+
+    if (!(new RegExp(ipInput.getAttribute("pattern"))).test(ipInput.value)) {
+        ipInput.classList.add("invalidinput");
+        valid = false;
+    }
+
+    if (valid) {
+        button.removeAttribute("disabled");
+    } else {
+        button.setAttribute("disabled", "");
+    }
+}
+
 function addServer() {
     var jsonToModify = JSON.parse(remotefs.readFileSync(serversPath));
 
@@ -149,6 +179,36 @@ function restoreDefaultServers() {
     loadServerList();
 }
 
+function validateVersionSave(modalName) {
+    // runs every time a key is entered into the version save form
+    var nameInput = document.getElementById(modalName + "version-nameinput");
+    var urlInput = document.getElementById(modalName + "version-urlinput");
+    var button = document.getElementById(modalName + "version-savebutton");
+    var valid = true;
+
+    nameInput.classList.remove("invalidinput");
+    urlInput.classList.remove("invalidinput");
+
+    if (
+        nameInput.value.length < parseInt(nameInput.getAttribute("minlength")) ||
+        nameInput.value.length > parseInt(nameInput.getAttribute("maxlength"))
+    ) {
+        nameInput.classList.add("invalidinput");
+        valid = false;
+    }
+
+    if (!(new RegExp(urlInput.getAttribute("pattern"))).test(urlInput.value)) {
+        urlInput.classList.add("invalidinput");
+        valid = false;
+    }
+
+    if (valid) {
+        button.removeAttribute("disabled");
+    } else {
+        button.setAttribute("disabled", "");
+    }
+}
+
 function addVersion() {
     var jsonToModify = JSON.parse(remotefs.readFileSync(versionsPath));
 
@@ -168,16 +228,16 @@ function addVersion() {
 
     if (matchingVersions.length > 0) return;
 
-    jsonToModify["versions"].push(version);
+    jsonToModify["versions"].unshift(version);
     remotefs.writeFileSync(versionsPath, JSON.stringify(jsonToModify, null, 4));
 
     loadCacheList();
-    startHashCheck(true);
+    handleCache("hash-check", version["name"]);
 }
 
 function editVersion() {
     var jsonToModify = JSON.parse(remotefs.readFileSync(versionsPath));
-    var edited = false;
+    var editedVersionString = null;
 
     $.each(jsonToModify["versions"], function (key, value) {
         if (value["name"] == getSelectedVersion() && !defaultHashes.hasOwnProperty(value["name"])) {
@@ -189,15 +249,15 @@
             $("#editversion-urlinput").val().length == 0 ?
value["url"] : $("#editversion-urlinput").val(); - edited = true; + editedVersionString = value["name"]; } }); - if (!edited) return; + if (!editedVersionString) return; remotefs.writeFileSync(versionsPath, JSON.stringify(jsonToModify, null, 4)); loadCacheList(); - startHashCheck(true); + handleCache("hash-check", editedVersionString); } function deleteVersion() { @@ -215,7 +275,7 @@ function deleteVersion() { remotefs.writeFileSync(versionsPath, JSON.stringify(jsonToModify, null, 4)); loadCacheList(); - startHashCheck(true); + delete cacheSizes[result.name]; } function restoreDefaultVersions() { @@ -224,7 +284,7 @@ function restoreDefaultVersions() { versionsPath ); loadCacheList(); - startHashCheck(true); + handleCache("hash-check"); } function loadGameVersions() { @@ -270,6 +330,10 @@ function loadServerList() { // No servers are added, make sure placeholder is visible $("#server-listing-placeholder").attr("hidden", false); } + + // Check these once to get them into the correct state + validateServerSave("add"); + validateServerSave("edit"); } function loadCacheList() { @@ -304,12 +368,19 @@ function loadCacheList() { $("#cache-tablebody").append(row); }); + + storageLoadingStart(); + storageLoadingUpdate(cacheSizes); + storageLoadingComplete(cacheSizes); + + // Check these once to get them into the correct state + validateVersionSave("add"); + validateVersionSave("edit"); } -function startHashCheck(forced) { - // only run once unless forced - if (forced || !cacheSizes) - handleCache("hash-check"); +function startHashCheck() { + // only run once + if (!cacheSizes) handleCache("hash-check"); } function getCacheElemID(versionString, cacheMode, elementName) { @@ -364,7 +435,11 @@ function getCacheInfoCell(versionString, cacheMode) { var labelCache = document.createElement("label"); labelCache.setAttribute("id", labelID); labelCache.setAttribute("for", divID); - labelCache.innerHTML = getCacheLabelText(); + labelCache.innerHTML = getCacheLabelText( + (cacheSizes && cacheSizes[versionString]) ? + cacheSizes[versionString][cacheMode] : + null + ); var divCacheButtons = document.createElement("div"); divCacheButtons.setAttribute("id", labelID); @@ -494,9 +569,7 @@ function handleCache(operation, versionString, cacheMode, callback) { var versions = versionArray.filter(function (obj) { return obj.name === versionString; }); - var cdnRoot = (versions.length === 0) ? - cdnString : - path.dirname(versions[0].url); + var cdnRoot = (versions.length === 0) ? cdnString : versions[0].url; var lastSizes = { intact: 0, altered: 0, total: 0 }; var buf = ""; @@ -528,14 +601,16 @@ function handleCache(operation, versionString, cacheMode, callback) { path.join(__dirname, "lib", "cache_handler.exe"), [ "--operation", operation, + // roots below contain version-agnostic main directories for caches "--playable-root", cacheRoot, "--offline-root", offlineRoot, "--user-dir", userData, + // CDN root contains version-specific directory, unless cacheMode is "all" "--cdn-root", cdnRoot, "--cache-mode", cacheMode || "all", "--cache-version", versionString || "all", "--port", server.address().port, - "--permanent-caches" + "--official-caches" ].concat(Object.keys(defaultHashes)), { stdio: "inherit" @@ -644,8 +719,9 @@ function prepGameInfo(serverUUID) { // For writing loginInfo.php, assetInfo.php, etc. function setGameInfo(serverInfo, versionURL) { - window.assetUrl = versionURL; // game-client.js needs to access this - console.log("Cache will expand from " + versionURL); + var versionURLRoot = versionURL.endsWith("/") ? 
+        versionURL : versionURL + "/";
+    window.assetUrl = versionURLRoot; // game-client.js needs to access this
+    console.log("Cache will expand from " + versionURLRoot);
 
     remotefs.writeFileSync(path.join(__dirname, "assetInfo.php"), assetUrl);
 
     if (serverInfo.hasOwnProperty("endpoint")) {
diff --git a/cache_handler/cache_handler.py b/cache_handler/cache_handler.py
index 23a5c08..5c9539b 100644
--- a/cache_handler/cache_handler.py
+++ b/cache_handler/cache_handler.py
@@ -48,7 +48,7 @@ class FileInfo:
             local_root=self.local_root,
             url_root=self.url_root,
             current_local_path=(self.current_local_path / suffix),
-            current_url=(self.url_root.rstrip('/') + '/' + suffix.lstrip('/')),
+            current_url=(self.current_url.rstrip('/') + '/' + suffix.lstrip('/')),
             sha256=(sha256 or self.sha256),
         )
 
@@ -63,7 +63,7 @@ class FileInfo:
 class FileInfoGroup:
     version: str
     mode: str
-    permanent: bool
+    is_official: bool
     local_root: Path
     url_root: str
     file_info_list: List[FileInfo]
@@ -164,8 +164,11 @@ async def hash_check_unregistered(writer: asyncio.StreamWriter, file_info_groups
             sha256='',
         )
 
+        path_set = {str(fi.current_local_path.resolve())
+                    for fi in file_info_group.file_info_list}
+
         for file_path in file_info_group.local_root.glob('**/*'):
-            if any(file_path.samefile(file_info) for file_info in file_info_group.file_info_list):
+            if file_path.is_dir() or str(file_path.resolve()) in path_set:
                 continue
 
             # assume file is intact
@@ -180,7 +183,10 @@ async def download_unregistered_file_all(writer: asyncio.StreamWriter, file_info
     remote_path = Path(file_info.current_url.replace('file:', '', 1).lstrip('/'))
 
     for file_path in remote_path.glob('**/*'):
-        new_file_info = file_info.resolve_full(file_path)
+        if file_path.is_dir():
+            continue
+
+        new_file_info = file_info.resolve(file_path.relative_to(remote_path).as_posix())
         new_file_info.current_local_path.parent.mkdir(parents=True, exist_ok=True)
         shutil.copy(file_path, new_file_info.current_local_path)
 
@@ -189,8 +195,6 @@
         await register_size_and_hash(new_file_info)
         await send_message(writer)
 
-    await send_message(writer)
-
 
 async def download_unregistered_http_all(
     writer: asyncio.StreamWriter,
@@ -204,7 +208,10 @@
 
     file_info.current_local_path.mkdir(exist_ok=True)
 
-    page = await httpx.get(file_info.current_url).content
+    response = await client.get(file_info.current_url)
+    response.raise_for_status()
+
+    page = response.content
 
     bs = BeautifulSoup(page, 'html.parser')
     links = bs.find_all('a', href=True)
@@ -350,7 +357,7 @@ async def hash_check(writer: asyncio.StreamWriter, file_info_groups: List[FileIn
                          if file_info_group.file_info_list]
 
     unregistered_groups = [file_info_group for file_info_group in file_info_groups
-                           if not file_info_group.permanent]
+                           if not file_info_group.is_official]
 
     if registered_groups:
         await hash_check_registered(writer, registered_groups, update_freq=update_freq)
@@ -373,7 +380,7 @@ async def download(writer: asyncio.StreamWriter, file_info_groups: List[FileInfo
                          if file_info_group.file_info_list]
 
     unregistered_groups = [file_info_group for file_info_group in file_info_groups
-                           if not file_info_group.permanent]
+                           if not file_info_group.is_official]
 
     async with httpx.AsyncClient(limits=httpx.Limits(max_connections=max_connections),
                                  timeout=httpx.Timeout(None)) as client:
@@ -394,10 +401,10 @@
     """
     registered_groups = [file_info_group for file_info_group in
                          file_info_groups
-                         if file_info_group.permanent]
+                         if file_info_group.is_official]
 
     unregistered_groups = [file_info_group for file_info_group in file_info_groups
-                           if not file_info_group.permanent]
+                           if not file_info_group.is_official]
 
     if registered_groups:
         await delete_registered(writer, registered_groups)
@@ -455,7 +462,11 @@ def compile_file_lists(args: Namespace) -> List[FileInfoGroup]:
         local_root = args.offline_root if cache_mode == 'offline' else args.playable_root
         local_dir = swapped_path(local_root, args.user_dir, cache_version, cache_mode)
-        url_dir = args.cdn_root.rstrip('/') + '/' + cache_version.lstrip('/')
+        url_dir = (
+            args.cdn_root.rstrip('/') + '/' + cache_version.lstrip('/')
+            if args.cache_version == 'all' else
+            args.cdn_root
+        )
 
         file_info_version = FileInfo(
             version=cache_version,
@@ -484,7 +495,7 @@
         file_info_groups.append(FileInfoGroup(
             version=cache_version,
             mode=cache_mode,
-            permanent=(cache_version in args.permanent_caches),
+            is_official=(cache_version in args.official_caches),
             local_root=local_dir,
             url_root=url_dir,
             file_info_list=file_info_list,
@@ -493,6 +504,21 @@
     return file_info_groups
 
 
+def write_hash_updates(args: Namespace) -> None:
+    if not hash_dict_updated:
+        return
+
+    for version_name in hash_dict:
+        if version_name in args.official_caches:
+            continue
+
+        for cache_mode in ['playable', 'offline']:
+            hash_dict[version_name][cache_mode] = dict(sorted(hash_dict[version_name][cache_mode].items()))
+
+    with open(Path(args.user_dir) / 'hashes.json', 'w') as w:
+        json.dump(hash_dict, w, indent=4)
+
+
 async def prep_and_run_coroutine(args: Namespace) -> None:
     file_info_groups = compile_file_lists(args)
 
@@ -504,20 +530,16 @@
         'fix': download,
         'delete': delete,
     }
-    await coroutines[args.operation](writer, file_info_groups)
+
+    try:
+        await coroutines[args.operation](writer, file_info_groups)
+    finally:
+        await send_message(writer)
 
     writer.close()
     await writer.wait_closed()
 
-    if hash_dict_updated:
-        print('Updated hashes.json!')
-        # todo: prettify
-        for version_name in hash_dict:
-            if version_name not in args.permanent_caches:
-                hash_dict[version_name]['playable'] = dict(sorted(hash_dict[version_name]['playable'].items()))
-                hash_dict[version_name]['offline'] = dict(sorted(hash_dict[version_name]['offline'].items()))
-        with open(Path(args.user_dir) / 'hashes.json', 'w') as w:
-            json.dump(hash_dict, w, indent=4)
+    write_hash_updates(args)
 
 
 def parse_args() -> Namespace:
@@ -530,7 +552,7 @@
     parser.add_argument('--cache-mode', dest='cache_mode', type=str, default='all', choices=['all', 'offline', 'playable'])
     parser.add_argument('--cache-version', dest='cache_version', type=str, default='all')
     parser.add_argument('--port', type=str, required=True)
-    parser.add_argument('--permanent-caches', dest='permanent_caches', nargs='*', type=str, default=[])
+    parser.add_argument('--official-caches', dest='official_caches', nargs='*', type=str, default=[])
 
     return parser.parse_args()
diff --git a/cache_handler/cache_handler.spec b/cache_handler/cache_handler.spec
index be4018d..6c24636 100644
--- a/cache_handler/cache_handler.spec
+++ b/cache_handler/cache_handler.spec
@@ -1,37 +1,33 @@
 # -*- mode: python ; coding: utf-8 -*-
+block_cipher = None
 
-a = Analysis(
-    ['cache_handler.py'],
-    pathex=[],
-    binaries=[],
-    datas=[],
-    hiddenimports=[],
-    hookspath=[],
-    hooksconfig={},
-    runtime_hooks=[],
-    excludes=[],
-    noarchive=False,
-)
-pyz = PYZ(a.pure)
-exe = EXE(
-    pyz,
-    a.scripts,
-    a.binaries,
-    a.datas,
-    [],
-    name='cache_handler',
-    debug=False,
-    bootloader_ignore_signals=False,
-    strip=False,
-    upx=True,
-    upx_exclude=[],
-    runtime_tmpdir=None,
-    console=True,
-    disable_windowed_traceback=False,
-    argv_emulation=False,
-    target_arch=None,
-    codesign_identity=None,
-    entitlements_file=None,
-)
+a = Analysis(['cache_handler.py'],
+             pathex=['Z:\\src'],
+             binaries=[],
+             datas=[],
+             hiddenimports=[],
+             hookspath=[],
+             runtime_hooks=[],
+             excludes=[],
+             win_no_prefer_redirects=False,
+             win_private_assemblies=False,
+             cipher=block_cipher,
+             noarchive=False)
+pyz = PYZ(a.pure, a.zipped_data,
+          cipher=block_cipher)
+exe = EXE(pyz,
+          a.scripts,
+          a.binaries,
+          a.zipfiles,
+          a.datas,
+          [],
+          name='cache_handler',
+          debug=False,
+          bootloader_ignore_signals=False,
+          strip=False,
+          upx=True,
+          upx_exclude=[],
+          runtime_tmpdir=None,
+          console=True )
diff --git a/index.html b/index.html
index 8d3e27a..8af6ade 100644
--- a/index.html
+++ b/index.html
@@ -258,6 +258,7 @@
                             required=""
                             minlength="1"
                             maxlength="70"
+                            oninput="validateServerSave('add')"
                         />
@@ -330,6 +333,7 @@
                             required=""
                             minlength="1"
                             maxlength="70"
+                            oninput="validateServerSave('edit')"
                         />
@@ -584,6 +590,7 @@
                             required=""
                             minlength="1"
                             maxlength="70"
+                            oninput="validateVersionSave('add')"
                         />
@@ -638,7 +646,7 @@