Tools: Refactor translation string extraction to support multiple source files

Signed-off-by: SternXD <stern@sidestore.io>

SternXD
2026-01-12 13:11:25 -05:00
committed by lightningterror
parent 082a28dc13
commit cc338cdd9d


@@ -5,30 +5,32 @@ import os
 START_IDENT = "// TRANSLATION-STRING-AREA-BEGIN"
 END_IDENT = "// TRANSLATION-STRING-AREA-END"
-src_file = os.path.join(os.path.dirname(__file__), "..", "pcsx2", "ImGui", "FullscreenUI.cpp")
+src_files = [
+    os.path.join(os.path.dirname(__file__), "..", "pcsx2", "ImGui", "FullscreenUI.cpp"),
+    os.path.join(os.path.dirname(__file__), "..", "pcsx2", "ImGui", "FullscreenUI_Settings.cpp"),
+]
-with open(src_file, "r") as f:
-    full_source = f.read()
-strings = []
-for token in ["FSUI_STR", "FSUI_CSTR", "FSUI_FSTR", "FSUI_NSTR", "FSUI_VSTR", "FSUI_ICONSTR", "FSUI_ICONSTR_S"]:
+def extract_strings_from_source(source_content):
+    """Extract FSUI translation strings from source content."""
+    strings = []
+    for token in ["FSUI_STR", "FSUI_CSTR", "FSUI_FSTR", "FSUI_NSTR", "FSUI_VSTR", "FSUI_ICONSTR", "FSUI_ICONSTR_S"]:
         token_len = len(token)
         last_pos = 0
         while True:
-            last_pos = full_source.find(token, last_pos)
+            last_pos = source_content.find(token, last_pos)
             if last_pos < 0:
                 break
-            if last_pos >= 8 and full_source[last_pos - 8:last_pos] == "#define ":
+            if last_pos >= 8 and source_content[last_pos - 8:last_pos] == "#define ":
                 last_pos += len(token)
                 continue
-            if full_source[last_pos + token_len] == '(':
+            if source_content[last_pos + token_len] == '(':
                 start_pos = last_pos + token_len + 1
-                end_pos = full_source.find(")", start_pos)
-                s = full_source[start_pos:end_pos]
-                # Split into sting arguments, removing "
+                end_pos = source_content.find(")", start_pos)
+                s = source_content[start_pos:end_pos]
+                # Split into string arguments, removing "
                 string_args = [""]
                 arg = 0;
                 cpos = s.find(',')
@@ -41,8 +43,8 @@ for token in ["FSUI_STR", "FSUI_CSTR", "FSUI_FSTR", "FSUI_NSTR", "FSUI_VSTR", "F
                     epos = s.find('"', epos + 1)
                     # found ')' in string, extend s to next ')'
                     if epos == -1:
-                        end_pos = full_source.find(")", end_pos + 1)
-                        s = full_source[start_pos:end_pos]
+                        end_pos = source_content.find(")", end_pos + 1)
+                        s = source_content[start_pos:end_pos]
                         epos = pos
                         continue
@@ -71,24 +73,42 @@ for token in ["FSUI_STR", "FSUI_CSTR", "FSUI_FSTR", "FSUI_NSTR", "FSUI_VSTR", "F
                 assert len(new_s) > 0
                 #assert (end_pos - start_pos) < 300
                 #if (end_pos - start_pos) >= 300:
                 # print("WARNING: Long string")
                 # print(new_s)
                 if new_s not in strings:
                     strings.append(new_s)
             last_pos += len(token)
+    return strings
-print(f"Found {len(strings)} unique strings.")
+def process_file(src_file):
+    """Process a single source file extract strings and update its translation area."""
+    print(f"\nProcessing: {src_file}")
-start = full_source.find(START_IDENT)
-end = full_source.find(END_IDENT)
-assert start >= 0 and end > start
+    with open(src_file, "r") as f:
+        source = f.read()
-new_area = ""
-for string in list(strings):
+    start = source.find(START_IDENT)
+    end = source.find(END_IDENT)
+    if start < 0 or end <= start:
+        print(f" Warning: No translation string area found in {src_file}")
+        return 0
+    source_without_area = source[:start] + source[end + len(END_IDENT):]
+    strings = extract_strings_from_source(source_without_area)
+    print(f" Found {len(strings)} unique strings.")
+    new_area = ""
+    for string in strings:
         new_area += f"TRANSLATE_NOOP(\"FullscreenUI\", \"{string}\");\n"
-full_source = full_source[:start+len(START_IDENT)+1] + new_area + full_source[end:]
-with open(src_file, "w") as f:
-    f.write(full_source)
+    new_source = source[:start + len(START_IDENT) + 1] + new_area + source[end:]
+    with open(src_file, "w") as f:
+        f.write(new_source)
+    return len(strings)
+total_strings = 0
+for src_file in src_files:
+    total_strings += process_file(src_file)
+print(f"\nTotal: {total_strings} unique strings across all files.")