Compare commits

...

29 Commits

Author SHA1 Message Date
9deba1956f [rankendpoint] Add invocation instructions 2023-12-24 21:35:35 +01:00
1a3a530c7f [rankendpoint] Get DB path and endpoint route from env vars
Also reverted the use of main() that made it incompatible with uwsgi and
flask run.
2023-12-24 20:57:05 +01:00
f62f3af483 Revert "Autorun Flask app"
This reverts commit e0e0b3c5e3.

Flask applications are not supposed to be run this way. We run the
endpoints with uwsgi in prod or with the flask program for debugging.

https://flask.palletsprojects.com/en/3.0.x/api/#flask.Flask.run
2023-12-24 03:36:33 +01:00
gsemaj
f02c960497 Use same rank for tied scores 2023-12-22 22:14:05 -08:00
gsemaj
e0e0b3c5e3 Autorun Flask app 2023-12-22 22:04:05 -08:00
gsemaj
615bd3d9a3 Add ranked endpoint server 2023-12-22 21:48:18 -08:00
gsemaj
4d6490933d Add option for score capping 2023-12-19 13:21:37 -05:00
gsemaj
073666e6f4 Add OG racing score conversion script 2023-10-11 18:54:35 -04:00
gsemaj
7af28d8e70 fix submesh overlap 2023-09-09 17:45:47 -07:00
gsemaj
3bb3df7a2d Split terrain into chunks of chunk_size 2023-09-09 16:40:22 -07:00
gsemaj
f6dc2d690f code to show all selected faces 2023-09-09 11:24:47 -07:00
gsemaj
af2988e062 fix error 2023-09-09 10:43:43 -07:00
gsemaj
03a3e21532 Fix UVs 2023-09-09 10:28:48 -07:00
gsemaj
20d5f6231c Export as OBJ 2023-07-20 14:27:39 -04:00
gsemaj
131297552e Fix normals 2023-07-20 14:27:24 -04:00
gsemaj
ef8ae1fb6e Fix vertex positioning and outfile name 2023-07-20 14:04:00 -04:00
gsemaj
3aabb35f33 Add terrain mesh extractor 2023-07-13 16:18:09 -04:00
gsemaj
7b750db9f9 Update README 2022-08-11 15:56:07 -04:00
gsemaj
aa564926a0 Fix CameraPos translation 2022-08-11 15:46:58 -04:00
gsemaj
2496f04987 Add semantic to f2a struct 2022-08-11 13:21:33 -04:00
gsemaj
916857edc3 Unity 3 fixes 2022-08-11 12:20:51 -04:00
gsemaj
3791e889c8 Add support for multiple subprograms 2022-08-11 11:53:38 -04:00
gsemaj
d0e67d55c9 Standalone support for basic fragment shaders 2022-08-11 04:06:03 -04:00
gsemaj
869d5b1976 Add support for SetTexture 2022-08-10 21:03:56 -04:00
gsemaj
8725dd1e4e Entry points and arg fix 2022-08-10 21:01:51 -04:00
gsemaj
8fbe59e5a1 Fix rcp bug 2022-08-10 20:09:59 -04:00
gsemaj
a53fb21621 Add .gitignore 2022-08-10 20:09:22 -04:00
gsemaj
9dd5db86eb Update README 2022-08-10 20:06:02 -04:00
gsemaj
4ace5f065f Add dx2cg 2022-08-10 19:47:13 -04:00
9 changed files with 924 additions and 0 deletions

136
db_migration/ogracing.py Normal file
View File

@@ -0,0 +1,136 @@
#!/usr/bin/env python3
# OG racing score conversion script
#
# This script is meant to update racing scoreboards from Retro-style scores
# to OG-accurate scores. Be careful to only run this once!
#
# The script will create a backup of your DB, as well as a log file
# containing a record of all the changes that were made. Make sure to
# preserve this log file in case you need to reference it in the future.
#
# If something goes wrong with the first invocation, you'll need to move the
# DB backup and log files out of the way before the script can be re-run.
#
# If all goes well, you should see different, OG-true scores in IZ scoreboards.
#
# Do not hesitate to ask the OF developers for assistance if necessary.
import sys
import os.path
import shutil
import logging
import json
from math import exp
import sqlite3
# Log file that records the backup notice and every score rewrite.
LOGFILE = 'ogracing.log'
DRY_RUN = False # set to True if testing the script
CAP_SCORES = True # set to False to disable capping scores to the IZ maximum
class EpData:
    """Per-EP racing tuning values, populated from drops.json."""

    max_score: int = 0     # IZ score cap ("ScoreCap")
    pod_factor: float = 0  # weight of pods collected ("PodFactor")
    max_pods: int = 0      # total pods in the IZ ("TotalPods")
    max_time: int = 0      # race time limit in seconds ("TimeLimit")
    time_factor: float = 0 # weight of elapsed time ("TimeFactor")
    scale_factor: float = 0  # additive constant in the exponent ("ScaleFactor")
class RaceResult:
    """One row of the RaceResults table."""

    epid: int = 0        # instance zone EP id
    playerid: int = 0    # player who ran the race
    score: int = 0       # stored score
    timestamp: int = 0   # unix epoch of the run
    ring_count: int = 0  # pods collected
    time: int = 0        # elapsed race time
def check_version(cur):
    """Abort the script unless the DB schema version is at least 2."""
    row = cur.execute("SELECT Value FROM Meta WHERE Key = 'DatabaseVersion';").fetchone()
    version = row[0]
    if version < 2:
        sys.exit('fatal: you must first upgrade your DB version to 2 by running the server at least once')
def load_epinfo():
    """Parse drops.json (in the CWD) into a map of EPID -> EpData."""
    with open("drops.json", "r") as f:
        racing = json.load(f)["Racing"]
    epinfo = {}
    for entry in racing.values():
        epdata = EpData()
        epdata.max_score = int(entry["ScoreCap"])
        epdata.pod_factor = float(entry["PodFactor"])
        epdata.max_pods = int(entry["TotalPods"])
        epdata.max_time = int(entry["TimeLimit"])
        epdata.time_factor = float(entry["TimeFactor"])
        epdata.scale_factor = float(entry["ScaleFactor"])
        epinfo[int(entry["EPID"])] = epdata
    return epinfo
def get_results(cur):
    """Read every row of RaceResults into a list of RaceResult objects."""
    cur.execute('SELECT EPID, PlayerID, Timestamp, RingCount, Time, Score FROM RaceResults;')
    results = []
    for epid, playerid, timestamp, ring_count, time, score in cur.fetchall():
        result = RaceResult()
        result.epid = int(epid)
        result.playerid = int(playerid)
        result.timestamp = int(timestamp)
        result.ring_count = int(ring_count)
        result.time = int(time)
        result.score = int(score)
        results.append(result)
    return results
def process_result(cur, result, epinfo):
    """Recompute one race score with the OG formula and write it back.

    Honors the module-level DRY_RUN and CAP_SCORES switches; every change
    is recorded in the log file.
    """
    epdata = epinfo[result.epid]
    # OG formula: exp(pod component - time component + scale constant)
    pod_score = (epdata.pod_factor * result.ring_count) / epdata.max_pods
    time_score = (epdata.time_factor * result.time) / epdata.max_time
    newscore = int(exp(pod_score - time_score + epdata.scale_factor))
    if CAP_SCORES and newscore > epdata.max_score:
        capmsg = 'score {} greater than max ({}) for epid {}, capping'.format(newscore, epdata.max_score, result.epid)
        logging.warning(capmsg)
        print('warning: ' + capmsg)
        newscore = epdata.max_score
    logging.info('* {} -> {} (EPID: {}, pods: {}, time: {})'.format(result.score, newscore, result.epid, result.ring_count, result.time))
    if not DRY_RUN:
        # (PlayerID, Timestamp) identifies the row being rewritten
        cur.execute('UPDATE RaceResults SET Score = ? WHERE (PlayerID, Timestamp) = (?, ?);', (newscore, result.playerid, result.timestamp))
def main(path):
    """Back up the SQLite DB at `path`, then rewrite all racing scores.

    Refuses to run if a previous log file or DB backup is in the way.
    """
    if os.path.isfile(LOGFILE):
        sys.exit('fatal: a log file named {} already exists. refusing to modify.'.format(LOGFILE))
    # level 20 == logging.INFO
    logging.basicConfig(filename=LOGFILE, level=20, format='%(levelname)s: %(message)s')
    if not os.path.isfile(path):
        sys.exit('fatal: {} is not a file'.format(path))
    bakpath = path + '.ogracing.bak'
    if os.path.isfile(bakpath):
        sys.exit('fatal: a DB backup named {} already exists. refusing to overwrite.'.format(bakpath))
    shutil.copy(path, bakpath)
    bakmsg = 'saved database backup to {}'.format(bakpath)
    logging.info(bakmsg)
    print(bakmsg)
    epinfo = load_epinfo()
    # the connection context manager commits the transaction on success
    with sqlite3.connect(path) as db:
        cur = db.cursor()
        check_version(cur)
        for result in get_results(cur):
            process_result(cur, result, epinfo)
    logging.info('done.')
    print('done.')
# CLI entry point: expects exactly one argument, the SQLite database path.
if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit('usage: {} database.db'.format(sys.argv[0]))
    main(sys.argv[1])

2
dx2cg/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
__pycache__

11
dx2cg/README.md Normal file
View File

@@ -0,0 +1,11 @@
# dx2cg
Tools for converting d3d9 shader assembly to HLSL/Cg.
- `disassembler.py`: Takes in d3d9 assembly and gives back the HLSL equivalent.
- `swapper.py`: Searches a shader file for d3d9 assembly and calls the disassembler to replace it with HLSL.
- `main.py`: Executes the swapper on every file in a path, writing the changes to new files.
## Known issues
- Only vertex shaders with profile `vs_1_1` are supported
- Only fragment shaders with profile `ps_2_0` are supported
- Only a limited set of instructions (those used by FF and Unity 2.6) are supported
- Properties that don't begin with an underscore do not get captured as locals

360
dx2cg/disassembler.py Normal file
View File

@@ -0,0 +1,360 @@
#!/usr/bin/env python
# coding: utf-8
# d3d9 to cg crude dissassembler
# ycc 08/08/2022
import re
import sys
legacy = False # True for 2.6

# Unity built-in uniforms already provided by the engine; these must not be
# re-declared as shader locals by process_header().
reserved = {
    "_Time",
    "_SinTime",
    "_CosTime",
    "_ProjectionParams",
    "_PPLAmbient",
    "_ObjectSpaceCameraPos",
    "_ObjectSpaceLightPos0",
    "_ModelLightColor0",
    "_SpecularLightColor0",
    "_Light2World0", "_World2Light0", "_Object2World", "_World2Object", "_Object2Light0",
    "_LightDirectionBias",
    "_LightPositionRange",
}

# Format templates: d3d9 declaration opcodes -> Cg statements.
decls = {
    "dcl_position": "float4 {0} = vdat.vertex;",
    "dcl_normal": "float4 {0} = float4(vdat.normal, 0);",
    "dcl_texcoord0": "float4 {0} = vdat.texcoord;",
    "dcl_texcoord1": "float4 {0} = vdat.texcoord1;",
    "dcl_color": "float4 {0} = vdat.color;",
    "dcl_2d": "; // no operation",
    "dcl": "float4 {0[0]}{0[1]} = pdat.{0[0]}{0[1]};",
    "def": "const float4 {0} = float4({1}, {2}, {3}, {4});",
}

# Format templates: d3d9 arithmetic/texture opcodes -> Cg statements.
ops = {
    "mov": "{0} = {1};",
    "add": "{0} = {1} + {2};",
    "mul": "{0} = {1} * {2};",
    "mad": "{0} = {1} * {2} + {3};",
    "dp4": "{0} = dot((float4){1}, (float4){2});",
    "dp3": "{0} = dot((float3){1}, (float3){2});",
    "min": "{0} = min({1}, {2});",
    "max": "{0} = max({1}, {2});",
    "rsq": "{0} = rsqrt({1});",
    "frc": "{0} = float4({1}.x - (float)floor({1}.x), {1}.y - (float)floor({1}.y), {1}.z - (float)floor({1}.z), {1}.w - (float)floor({1}.w));",
    "slt": "{0} = float4(({1}.x < {2}.x) ? 1.0f : 0.0f, ({1}.y < {2}.y) ? 1.0f : 0.0f, ({1}.z < {2}.z) ? 1.0f : 0.0f, ({1}.w < {2}.w) ? 1.0f : 0.0f);",
    "sge": "{0} = float4(({1}.x >= {2}.x) ? 1.0f : 0.0f, ({1}.y >= {2}.y) ? 1.0f : 0.0f, ({1}.z >= {2}.z) ? 1.0f : 0.0f, ({1}.w >= {2}.w) ? 1.0f : 0.0f);",
    "rcp": "{0} = ({1} == 0.0f) ? FLT_MAX : (({1} == 1.0f) ? {1} : (1 / {1}));",
    "texld": "{0} = tex2D({2}, (float2){1});",
}

# Cg struct/function scaffolding emitted around the translated instructions.
struct_a2v = """struct a2v {
\tfloat4 vertex : POSITION;
\tfloat3 normal : NORMAL;
\tfloat4 texcoord : TEXCOORD0;
\tfloat4 texcoord1 : TEXCOORD1;
\tfloat4 tangent : TANGENT;
\tfloat4 color : COLOR;
};
"""

# Unity 2.6-era (legacy) profiles use POSITION; newer ones use SV_POSITION.
v2f_postype = "POSITION" if legacy else "SV_POSITION"
struct_v2f = f"""struct v2f {{
\tfloat4 pos : {v2f_postype};
\tfloat4 t0 : TEXCOORD0;
\tfloat4 t1 : TEXCOORD1;
\tfloat4 t2 : TEXCOORD2;
\tfloat4 t3 : TEXCOORD3;
\tfloat fog : FOG;
\tfloat4 d0 : COLOR0;
\tfloat4 d1 : COLOR1;
}};
"""

struct_f2a = """struct f2a {
\tfloat4 c0 : COLOR0;
};
"""

cg_header = """CGPROGRAM
#include "UnityCG.cginc"
#pragma exclude_renderers xbox360 ps3 gles
"""

cg_footer = """ENDCG"""

# Shader entry points; {0} is replaced with the translated instruction body.
vertex_func = """v2f vert(a2v vdat) {{
\tfloat4 r0, r1, r2, r3, r4;
\tfloat4 tmp;
\tv2f o;
{0}
\treturn o;
}}
"""

fragment_func = """f2a frag(v2f pdat) {{
\tfloat4 r0, r1, r2, r3, r4;
\tfloat4 tmp;
\tf2a o;
{0}
\treturn o;
}}
"""
def process_header(prog):
    """Consume the ShaderLab header lines of a subprogram, in place.

    Removes Keywords/Bind/Local/Matrix/SetTexture lines from `prog` (a list
    of lines) so only the assembly remains, and returns a tuple
    (keywords, loctab, locdecl, binds, lighting) where:
      keywords -- quoted multi_compile keywords found
      loctab   -- maps d3d9 registers ("c0", "s0", ...) to Cg expressions
      locdecl  -- uniform declarations that must be emitted
      binds    -- raw Bind lines for a BindChannels section
      lighting -- True if any glstate lighting value was referenced
    """
    keywords = []
    loctab = {}
    locdecl = []
    binds = []
    i = 0
    lighting = False
    textures = 0
    while i < len(prog):
        line = prog[i]
        if line.startswith("Keywords"):
            # raw string: \w and \d are regex escapes, not string escapes
            # (non-raw form emits SyntaxWarning on Python 3.12+)
            keywords = re.findall(r"\"[\w\d]+\"", line)
            del prog[i]
            i = i - 1
        elif line.startswith("Bind"):
            binds.append(line)
            del prog[i]
            i = i - 1
        elif line.startswith("Local") or line.startswith("Matrix"):
            dec = line.split(' ')
            key = int(dec[1][:-1])
            if dec[2][0] == '[':
                # singleton: [name]
                val = dec[2][1:-1]
                if val[0] == '_' and val not in reserved:
                    loctype = "float4" if dec[0] == "Local" else "float4x4"
                    locdecl.append(f"{loctype} {val};")
            elif dec[2][0] == '(':
                # components: (a,b,c,d) where each may be [name] or a literal
                vals = dec[2][1:-1].split(',')
                for j, v in enumerate(vals):
                    if v[0] == '[':
                        vals[j] = v[1:-1]
                        if vals[j][0] == '_' and vals[j] not in reserved:
                            locdecl.append(f"float {vals[j]};")
                val = f"float4({vals[0]},{vals[1]},{vals[2]},{vals[3]})"
            # translate glstate/builtin names to their Cg/Unity equivalents
            lightval = re.match(r"glstate_light(\d)_([a-zA-Z]+)", val)
            if lightval:
                val = f"glstate.light[{lightval[1]}].{lightval[2]}"
                lighting = True
            elif val == "_ObjectSpaceCameraPos" and not legacy:
                val = "mul(_World2Object, float4(_WorldSpaceCameraPos, 1.0f))"
            elif val == "_ObjectSpaceLightPos0" and not legacy:
                val = "mul(_World2Object, _WorldSpaceLightPos0)"
                lighting = True
            elif val == "glstate_lightmodel_ambient":
                val = "glstate.lightmodel.ambient"
                lighting = True
            elif val.startswith("glstate_matrix_texture"):
                val = f"glstate.matrix.texture[{val[-1]}]" if legacy else f"UNITY_MATRIX_TEXTURE{val[-1]}"
            elif val == "glstate_matrix_mvp":
                val = "glstate.matrix.mvp" if legacy else "UNITY_MATRIX_MVP"
            elif val == "glstate_matrix_modelview0":
                val = "glstate.matrix.modelview[0]" if legacy else "UNITY_MATRIX_MV"
            elif val == "glstate_matrix_transpose_modelview0":
                val = "glstate.matrix.transpose.modelview[0]" if legacy else "UNITY_MATRIX_T_MV"
            elif val == "glstate_matrix_invtrans_modelview0":
                val = "glstate.matrix.invtrans.modelview[0]" if legacy else "UNITY_MATRIX_IT_MV"
            elif val.startswith("glstate"):
                raise ValueError(f"Unrecognized glstate: {val}")
            if dec[0] == "Local":
                loctab[f"c{key}"] = val
            elif dec[0] == "Matrix":
                # a matrix occupies four consecutive constant registers
                for offset in range(0, 4):
                    loctab[f"c{key + offset}"] = f"{val}[{offset}]"
            del prog[i]
            i = i - 1
        elif line.startswith("SetTexture"):
            dec = line.split(' ')
            if dec[2] != "{2D}":
                raise ValueError(f"Unknown texture type {dec[2]}")
            # samplers are assigned s0, s1, ... in order of appearance
            key = f"s{textures}"
            val = dec[1][1:-1]
            loctab[key] = val
            locdecl.append(f"sampler2D {val};")
            textures = textures + 1
            del prog[i]
            i = i - 1
        i = i + 1
    # print(loctab)
    return (keywords, loctab, locdecl, binds, lighting)
def resolve_args(args, loctab, consts):
    """Rewrite d3d9 register operands in `args` (in place) to Cg names.

    r/v/t registers pass through; c registers are looked up in `loctab`
    unless they were defined by a `def` (tracked in `consts`); samplers
    are looked up; o registers become fields of the output struct.
    """
    for idx, token in enumerate(args):
        sign = ""
        if token.startswith('-'):
            sign = "-"
            token = token[1:]
        # split off any swizzle suffix such as ".xyz" and re-attach later
        base, sep, rest = token.partition(".")
        swiz = sep + rest
        token = base
        first = token[0]
        if first in ('r', 'v', 't'):
            pass
        elif first == 'c':
            if token not in consts:
                token = loctab[token]
        elif first == 's':
            token = loctab[token]
        elif first == 'o':
            token = f"o.{token[1:].lower()}"
        elif re.match(r"[+-]?([0-9]*[.])?[0-9]+", token):
            # numeric literal
            pass
        else:
            raise ValueError(f"Unknown arg {token}")
        args[idx] = sign + token + swiz
def decode(code, args):
    """Translate one resolved d3d9 instruction into a list of Cg lines.

    Declarations map directly; arithmetic ops are computed into `tmp`
    and then copied component-by-component to honor the write mask.
    """
    if code in decls:
        return [decls[code].format(*args)]
    if code not in ops:
        raise ValueError(f"Unknown opcode {code}")
    target = args[0]
    if target == "o.fog":
        # fog is a scalar output; no write-mask expansion needed
        return [ops[code].format(*args)]
    mask_match = re.search(r"\.[xyzw]+", target)
    if mask_match:
        swiz = target[mask_match.start() + 1:]
        target = target[:mask_match.start()]
    else:
        swiz = "xyzw"
    out = [ops[code].format("tmp", *args[1:])]
    out += [f"{target}.{c} = tmp.{c};" for c in swiz]
    return out
def process_asm(asm, loctab):
    """Translate a stripped d3d9 assembly listing into Cg statements.

    `asm` is the subprogram's lines after process_header() removed the
    metadata; asm[0] holds the shader profile. Returns a tuple
    (shadertype, translated) where shadertype is "vertex" or "fragment".
    """
    shadertype = ""
    if asm[0] == "\"vs_1_1":
        shadertype = "vertex"
    elif asm[0] == "\"ps_2_0":
        shadertype = "fragment"
    else:
        raise ValueError(f"Unsupported shader type: {asm[0][1:]}")
    consts = set()
    translated = []
    i = 1
    while i < len(asm):
        instruction = asm[i]
        # a lone closing quote terminates the assembly block
        if instruction == "\"":
            break
        space = instruction.find(" ")
        if space == -1:
            code = instruction
            args = []
        else:
            code = instruction[:space]
            args = instruction[space+1:].split(", ")
        if code == "def":
            # registers set by `def` must not be resolved through loctab
            consts.add(args[0])
        # strip the partial-precision suffix (e.g. mov_pp -> mov)
        pp = code.find("_pp")
        if pp > -1:
            code = code[:pp]
        resolve_args(args, loctab, consts)
        disasm = decode(code, args)
        # print(f"{instruction} \t==>\t{disasm}")
        # keep the original instruction as a comment above its translation
        disasm.insert(0, f"// {instruction}")
        translated.extend(disasm)
        i = i + 1
    return (shadertype, translated)
def disassemble(blocks):
    """Disassemble a list of d3d9 SubProgram blocks into one Cg snippet.

    Merges vertex and fragment subprograms into a single CGPROGRAM with
    shared keywords, uniform declarations and bind channels.
    """
    shaders = {}
    keywords = set()
    locdecl = set()
    binds = set()
    lighting = False
    for block in blocks:
        # first/last lines are the SubProgram wrapper; keep the interior
        asm = block.split('\n')[1:-1]
        (kw, ltab, ldecl, bds, light) = process_header(asm)
        keywords.update(kw)
        locdecl.update(ldecl)
        binds.update(bds)
        lighting |= light
        (shadertype, disasm) = process_asm(asm, ltab)
        shaders[shadertype] = disasm
    text = ""
    if len(binds) > 0:
        text += "BindChannels {\n"
        for b in binds:
            text += f"\t{b}\n"
        text += "}\n"
    if lighting:
        text += "Lighting On\n"
    text += cg_header
    if len(keywords) > 0:
        # NOTE(review): no trailing "\n" here, so the next #pragma is glued
        # onto this line when keywords exist -- looks like a bug; confirm.
        text += "#pragma multi_compile " + " ".join(keywords)
    if "vertex" in shaders:
        text += "#pragma vertex vert\n"
    if "fragment" in shaders:
        text += "#pragma fragment frag\n"
    text += "\n"
    if "vertex" in shaders:
        text += struct_a2v + "\n"
        text += struct_v2f + "\n"
    if "fragment" in shaders:
        text += struct_f2a + "\n"
    text += "\n".join(locdecl) + "\n"
    if "vertex" in shaders:
        text += "\n" + vertex_func.format("\t" + "\n\t".join(shaders["vertex"]))
    if "fragment" in shaders:
        text += "\n" + fragment_func.format("\t" + "\n\t".join(shaders["fragment"]))
    text += cg_footer
    return text
# CLI entry point: read a file whose d3d9 subprogram blocks are separated
# by '~' and print the disassembled Cg to stdout.
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: disassembler.py <filename>")
    else:
        with open(sys.argv[1], "r") as fi:
            buf = fi.read()
            disasm = disassemble(buf.split('~'))
            print(disasm)

37
dx2cg/main.py Normal file
View File

@@ -0,0 +1,37 @@
#!/usr/bin/env python
# coding: utf-8
import os
import sys
from swapper import process
def process_file(filename, suffix):
    """Run the swapper on one file, writing to <stem><suffix>.<ext>.

    Returns whatever swapper.process returns (True if a file was written).
    """
    stem, dot, ext = filename.rpartition(".")
    if dot:
        outfile_name = stem + suffix + dot + ext
    else:
        # no extension: just append the suffix
        outfile_name = filename + suffix
    return process(filename, outfile_name)
def process_batch(path, suffix="_hlsl"):
    """Run the swapper over every file under `path`, recursing into
    subdirectories. `suffix` is appended to each output file's stem.
    """
    files = os.listdir(path)
    for f in files:
        full = f"{path}/{f}"
        # BUG FIX: the directory test must use the full path -- the bare
        # name was previously resolved against the CWD, so subdirectories
        # were treated as files. Also forward the caller's suffix when
        # recursing instead of silently resetting it to the default.
        if os.path.isdir(full):
            process_batch(full, suffix)
        else:
            try:
                if process_file(full, suffix):
                    print(f"Processed {f}")
                else:
                    print(f"Skipping {f}")
            except ValueError as err:
                print(f"Failed to process {f}: {err}")
# CLI entry point: main.py <folder> [outfile-suffix]
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: main.py <folder> [outfile-suffix]")
    elif len(sys.argv) == 2:
        process_batch(sys.argv[1])
    else:
        process_batch(*sys.argv[1:3])

77
dx2cg/swapper.py Normal file
View File

@@ -0,0 +1,77 @@
#!/usr/bin/env python
# coding: utf-8
# parser for replacing d3d9 subprograms in shaderlab files with HLSL/CG
# ycc 08/08/2022
import re
import sys
from disassembler import disassemble
# Indentation depth (in tabs) of the Program section inside a shaderlab file.
tabs = 3

def indent(block):
    """Prefix every line of `block` except the last with `tabs` tabs."""
    lines = block.split('\n')
    shifted = [tabs * "\t" + ln for ln in lines[:-1]]
    shifted.append(lines[-1])
    return "\n".join(shifted)
def find_closing_bracket(block, i):
    """Return the index of the '}' matching the first '{' at or after `i`.

    Raises ValueError if the braces never balance.
    """
    depth = 0
    while i < len(block):
        ch = block[i]
        if ch == '{':
            depth += 1
        elif ch == '}':
            depth -= 1
            if depth == 0:
                return i
        i = i + 1
    raise ValueError(f"Block at {i} has no closing bracket")
def process_program(prog):
    """Replace the d3d9 subprograms of one Program section with Cg.

    Collects every 'SubProgram "d3d9' brace-delimited block from `prog`,
    disassembles them together, and returns the indented replacement text.
    """
    # print("processing:\n" + prog)
    subprogs = []
    subprog_index = prog.find("SubProgram \"d3d9")
    while subprog_index > -1:
        subprog_end_index = find_closing_bracket(prog, subprog_index)
        subprog = prog[subprog_index:subprog_end_index+1]
        subprogs.append(subprog)
        # continue scanning after the block just consumed
        prog = prog[subprog_end_index+1:]
        subprog_index = prog.find("SubProgram \"d3d9")
    if len(subprogs) < 1:
        raise ValueError(f"Program has no d3d9 subprograms")
    processed = disassemble(subprogs) + "\n"
    return indent(processed)
def process_shader(shader):
    """Scan a whole shaderlab file and convert every 'Program ""' section.

    Text outside the Program sections is copied through verbatim; each
    section is handed to process_program(). Returns the full new text.
    """
    buf = shader
    processed = ''
    program_index = buf.find("Program \"\"")
    while program_index > -1:
        # copy everything before this Program section unchanged
        processed = processed + buf[:program_index]
        buf = buf[program_index:]
        # the '#LINE n' marker is the last line of the Program section
        line = re.search("#LINE [0-9]+\n", buf)
        if not line:
            raise ValueError(f"Program at {program_index} has no #LINE marker")
        end_index = line.end() + 1
        program_section = buf[:end_index+1]
        processed = processed + process_program(program_section)
        buf = buf[end_index+1:]
        program_index = buf.find("Program \"\"")
    processed = processed + buf
    return processed
def process(fn_in, fn_out):
    """Convert fn_in, writing to fn_out only if anything changed.

    Returns True if a converted file was written, False if the input
    contained nothing to convert.
    """
    with open(fn_in, "r") as fi:
        buf = fi.read()
    processed = process_shader(buf)
    if buf != processed:
        with open(fn_out, "w") as fo:
            fo.write(processed)
        return True
    return False
# CLI entry point: swapper.py <file-in> <file-out>
if __name__ == "__main__":
    if len(sys.argv) < 3:
        print("Usage: swapper.py <file-in> <file-out>")
    else:
        process(*sys.argv[1:3])

146
rankendpoint.py Normal file
View File

@@ -0,0 +1,146 @@
# This script serves an HTTP endpoint that provides the racing scores.
#
# Example invocation for testing:
# $ RANKENDPOINT_DBPATH=/path/to/database.db RANKENDPOINT_ROUTE=/getranks flask --app rankendpoint.py run
#
# Example invocation in production (behind a properly configured gateway like nginx):
# $ RANKENDPOINT_DBPATH=/path/to/database.db RANKENDPOINT_ROUTE=/getranks uwsgi \
# -s localhost:3031 --manage-script-name --mount /=rankendpoint:app --plugin python3
from flask import Flask, request
app = Flask(__name__)
import sqlite3
import sys
import os
# Response prefix the game client expects before the XML body.
header = "SUCCESS"
# Both settings are mandatory; see the invocation examples above.
db_path = os.environ.get('RANKENDPOINT_DBPATH')
route = os.environ.get('RANKENDPOINT_ROUTE')
if None in (db_path, route):
    sys.exit('must set RANKENDPOINT_DBPATH and RANKENDPOINT_ROUTE environment variables')
# Opens database in read-only mode
# Checking same thread disabled for now, which is fine since we never modify anything
try:
    db = sqlite3.connect('file:{}?mode=ro'.format(db_path), uri=True, check_same_thread=False)
    cur = db.cursor()
except Exception as ex:
    print(ex)
    sys.exit()
#db.set_trace_callback(print)
def fetch_ranks(epid, date, num):
    """Return the top scores for an EP, one row per player.

    date is an SQLite datetime modifier such as '-1 day'; num limits the
    row count (num < 0 means no limit). Rows are
    (PlayerID, FirstName, LastName, Score) tuples, best score first.
    """
    # The ordered subquery + GROUP BY keeps one row per player.
    # NOTE(review): this relies on SQLite's non-standard "bare column"
    # GROUP BY behavior -- verify before porting to another DB.
    sql = """
    SELECT * FROM (
        SELECT RaceResults.PlayerID,
            Players.FirstName,
            Players.LastName,
            RaceResults.Score
        FROM RaceResults
        INNER JOIN Players ON RaceResults.PlayerID=Players.PlayerID
        WHERE EPID=? AND
            DATETIME(Timestamp,'unixepoch') > (SELECT DATETIME('now', ?))
        ORDER BY Score DESC
    )
    GROUP BY PlayerID
    ORDER BY Score DESC
    """
    if num > -1:
        sql += "LIMIT ?"
        args = (epid, date, num)
    else:
        args = (epid, date)
    cur = db.execute(sql + ";", args)
    rows = cur.fetchall()
    return rows
def fetch_my_ranks(pcuid, epid, date):
    """Return the requesting player's single best score row for an EP.

    date is an SQLite datetime modifier such as '-7 day'. The result is
    a list with at most one (PlayerID, FirstName, LastName, Score) tuple.
    """
    sql = """
    SELECT RaceResults.PlayerID,
        Players.FirstName,
        Players.LastName,
        RaceResults.Score
    FROM RaceResults
    INNER JOIN Players ON RaceResults.PlayerID=Players.PlayerID
    WHERE RaceResults.PlayerID=? AND EPID=? AND
        DATETIME(Timestamp,'unixepoch') > (SELECT DATETIME('now', ?))
    ORDER BY Score DESC LIMIT 1;
    """
    args = (pcuid, epid, date)
    cur = db.execute(sql, args)
    rows = cur.fetchall()
    return rows
def get_score_entries(data, name):
    """Render rank rows as a <name>...</name> pseudo-XML fragment.

    `data` holds (PCUID, FirstName, LastName, Score) tuples sorted best
    first. Tied scores share a rank (dense ranking: 1, 1, 2, ...).
    """
    # Uncomment if you want placeholders in top 10 ranks ala Retro
    #if not name.startswith("my"):
    #    while len(data) < 10:
    #        data.append(((999, 'hehe', 'dong', 1)))
    parts = ["<{}>\n".format(name)]
    rank = 1
    last_score = -1
    for pcuid, first_name, last_name, score in data:
        if score == last_score:
            # tie: reuse the previous entry's rank
            rank -= 1
        parts.append('\t<score>PCUID="{}" Score="{}" Rank="{}" FirstName="{}" LastName="{}"</score>\n'.format(pcuid, score, rank, first_name, last_name))
        rank += 1
        last_score = score
    parts.append("</{}>\n".format(name))
    return "".join(parts)
# route should be something like /getranks
@app.route(f'{route}', methods=['POST'])
def rankings():
    """POST endpoint returning the racing scoreboards as pseudo-XML.

    Form params: PCUID (player id), EP_ID (instance zone), optional NUM
    (result count, default 10, negative = unlimited).
    """
    #print("PCUID:", request.form['PCUID'])
    #print("EP_ID:", request.form['EP_ID'])
    # Input Validation
    try:
        pcuid = int(request.form['PCUID'])
        epid = int(request.form['EP_ID'])
        num = 10 if 'NUM' not in request.form else int(request.form['NUM'])
    except ValueError as verr:
        return "Request param does not convert to int", 400
    except Exception as ex:
        # e.g. a required form field is missing entirely
        return "Error converting request param to int", 500
    # EP_ID must be between 1 and 33. also, ep #6 doesn't exist
    if not (1 <= epid <= 33) or (epid == 6):
        return "Invalid EP_ID", 400
    # Get everything we need from the DB...
    myday = fetch_my_ranks(pcuid, epid, '-1 day')
    day = fetch_ranks(epid, '-1 day', num)
    myweek = fetch_my_ranks(pcuid, epid, '-7 day')
    week = fetch_ranks(epid, '-7 day', num)
    mymonth = fetch_my_ranks(pcuid, epid, '-1 month')
    month = fetch_ranks(epid, '-1 month', num)
    myalltime = fetch_my_ranks(pcuid, epid, '-999 year')
    alltime = fetch_ranks(epid, '-999 year', num)
    # Slap that all into an "xml"...
    xmlbody = ""
    xmlbody += get_score_entries(myday, "myday")
    xmlbody += get_score_entries(day, "day")
    xmlbody += get_score_entries(myweek, "myweek")
    xmlbody += get_score_entries(week, "week")
    xmlbody += get_score_entries(mymonth, "mymonth")
    xmlbody += get_score_entries(month, "month")
    xmlbody += get_score_entries(myalltime, "myalltime")
    xmlbody += get_score_entries(alltime, "alltime")
    # and send it off!
    return header + xmlbody

View File

@@ -0,0 +1,150 @@
from unitypackff.asset import Asset
from unitypackff.environment import UnityEnvironment
import bpy
import bmesh
import os
# Source: Unity Web Player cache directory holding the FusionFall bundles.
dongpath = r'C:\Users\gents\AppData\LocalLow\Unity\Web Player\Cache\Fusionfall'
env = UnityEnvironment(base_path=dongpath)
# Destination root for the exported meshes.
outpath = r'C:\Users\gents\3D Objects\FFTerrainMeshes'
def uvs_from_vert(uv_layer, v):
    """Collect the uv coordinate of every loop attached to vertex `v`."""
    return [loop[uv_layer].uv for loop in v.link_loops]
def rip_terrain_mesh(f, outpath, clear=False):
    """Rebuild every TerrainData in asset file `f` as a Blender mesh,
    apply the heightmap shifts, split it into chunks and export as OBJ.

    f       -- open binary file handle of the asset
    outpath -- directory receiving the exported .obj
    clear   -- delete all scene objects after exporting (for batch runs)
    """
    dong = Asset.from_file(f, environment=env)
    for k, v in dong.objects.items():
        if v.type == 'TerrainData':
            terrainData = dong.objects[k].read()
            # the grid has one less quad per axis than heightmap samples
            terrain_width = terrainData['m_Heightmap']['m_Width'] - 1
            terrain_height = terrainData['m_Heightmap']['m_Height'] - 1
            scale_x = terrainData['m_Heightmap']['m_Scale']['x']
            scale_z = terrainData['m_Heightmap']['m_Scale']['z']
            scale_y = terrainData['m_Heightmap']['m_Scale']['y']
            # create the terrain
            bpy.ops.mesh.primitive_grid_add(x_subdivisions=terrain_width, y_subdivisions=terrain_height, size=128, enter_editmode=True, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1))
            context = bpy.context
            grid = context.edit_object
            bpy.ops.object.mode_set(mode='EDIT')
            bm = bmesh.from_edit_mesh(context.edit_object.data)
            bm.verts.ensure_lookup_table()
            # apply each raw height sample to the matching grid vertex
            for index, height in enumerate(terrainData['m_Heightmap']['m_Heights']):
                # scale height
                height_norm = height / (2 ** 15 - 2)
                bm.verts[index].co.z = height_norm * scale_y
                # pivot and scale x
                bm.verts[index].co.x += terrain_width / 2
                bm.verts[index].co.x *= scale_x
                # pivot and scale z
                bm.verts[index].co.y += terrain_height / 2
                bm.verts[index].co.y *= scale_z
                #print(f"{bm.verts[index].co.x}, {bm.verts[index].co.y}, {bm.verts[index].co.z}")
            indices = []
            # one grid cell's world-space width
            shift_amt = abs(bm.verts[0].co.x - bm.verts[1].co.x)
            uv_layer = bm.loops.layers.uv.active
            uv_shift_amt = 1 / 256
            # gather m_Shifts positions
            # NOTE(review): 129 matches the 129x129 sample grid used below;
            # confirm for terrains with other heightmap sizes.
            for shift in terrainData['m_Heightmap']['m_Shifts']:
                shift_index = shift['y'] + shift['x'] * 129
                indices.append(shift_index)
                v = bm.verts[shift_index]
                flags = shift['flags'] # bits: +X -X +Y -Y
                if flags & 0b1000: # +X
                    v.co.x += shift_amt
                    for uv in uvs_from_vert(uv_layer, v):
                        uv.x += uv_shift_amt
                if flags & 0b0100: # -X
                    v.co.x -= shift_amt
                    for uv in uvs_from_vert(uv_layer, v):
                        uv.x -= uv_shift_amt
                if flags & 0b0010: # +Y
                    v.co.y += shift_amt
                    for uv in uvs_from_vert(uv_layer, v):
                        uv.y += uv_shift_amt
                if flags & 0b0001: # -Y
                    v.co.y -= shift_amt
                    for uv in uvs_from_vert(uv_layer, v):
                        uv.y -= uv_shift_amt
            # apply triangulate modifier
            mod = grid.modifiers.new("Triangulate", 'TRIANGULATE')
            mod.quad_method = 'FIXED' # triangle orientation
            bpy.ops.object.mode_set(mode='OBJECT')
            bpy.ops.object.modifier_apply(modifier="Triangulate")
            bpy.ops.object.mode_set(mode="EDIT")
            # re-acquire the bmesh after leaving/re-entering edit mode
            bm = bmesh.from_edit_mesh(context.edit_object.data)
            bm.verts.ensure_lookup_table()
            # flip diagonally
            for v in bm.verts:
                tmp = v.co.x
                v.co.x = v.co.y
                v.co.y = tmp
            # flip normals
            # (the loop variable shadows the file-handle parameter `f`,
            # which is no longer used past Asset.from_file)
            for f in bm.faces:
                f.normal_flip()
            # select vertex chunks and separate
            verts = {}
            for x in range(129):
                for y in range(129):
                    idx = y + x * 129
                    v = bm.verts[idx]
                    verts[idx] = v
                    v.select = False
                    for f in v.link_faces:
                        f.select = False
            chunk_size = 8
            for x in range(128 // chunk_size):
                for y in range(128 // chunk_size):
                    # select the (chunk_size+1)^2 vertex patch for this chunk
                    for i in range(x * chunk_size, x * chunk_size + chunk_size + 1):
                        for j in range(y * chunk_size, y * chunk_size + chunk_size + 1):
                            idx = j + i * 129
                            v = verts[idx]
                            v.select = True
                    bm.select_mode = {'VERT', 'EDGE', 'FACE'}
                    bm.select_flush_mode()
                    bpy.context.tool_settings.mesh_select_mode = (False, False, True)
                    # duplicate the selection into its own chunk object
                    bpy.ops.mesh.duplicate()
                    bpy.ops.mesh.separate(type='SELECTED')
                    bpy.ops.mesh.select_all(action='DESELECT')
            # delete main
            bpy.ops.object.mode_set(mode="OBJECT")
            bpy.ops.object.select_all(action='DESELECT')
            grid.select_set(True)
            bpy.ops.object.delete()
            # export
            bpy.ops.object.select_all(action='SELECT')
            name = terrainData['m_Name']
            outfile = f"{name}.obj"
            bpy.ops.export_scene.obj(filepath=os.path.join(outpath, outfile))
            if(clear):
                bpy.ops.object.mode_set(mode="OBJECT")
                bpy.ops.object.select_all(action='SELECT')
                bpy.ops.object.delete()
# Batch driver: walk every cached resource bundle and rip its terrains.
dongs = os.listdir(dongpath)
for dongname in dongs:
    if not dongname.endswith("resourceFile"):
        continue
    assets = os.listdir(os.path.join(dongpath, dongname))
    for assetname in assets:
        if not assetname.startswith("CustomAssetBundle"):
            continue
        with open(os.path.join(dongpath, dongname, assetname), "rb") as f:
            # mirror the cache layout under outpath
            outdir = os.path.join(outpath, dongname, assetname)
            os.makedirs(outdir, exist_ok=True)
            rip_terrain_mesh(f, outdir)

View File

@@ -0,0 +1,5 @@
# Terrain Mesh Extractor
Blender + UPFF script to import terrain data as a mesh into Blender, then apply the shifts property to applicable vertices.
- Exports as OBJ (via Blender's `export_scene.obj`)
- The exported filenames come from each TerrainData object's name (`m_Name`)
- Folders for asset bundles that had no terrain objects will be empty