Tools: Refactor translation string extraction to support multiple source files

Signed-off-by: SternXD <stern@sidestore.io>

f
This commit is contained in:
SternXD
2026-01-12 13:11:25 -05:00
committed by lightningterror
parent 082a28dc13
commit cc338cdd9d

View File

import os

# Markers delimiting the auto-generated TRANSLATE_NOOP area in each source file.
START_IDENT = "// TRANSLATION-STRING-AREA-BEGIN"
END_IDENT = "// TRANSLATION-STRING-AREA-END"

# Source files to scan for FSUI_* translation strings.
src_files = [
    os.path.join(os.path.dirname(__file__), "..", "pcsx2", "ImGui", "FullscreenUI.cpp"),
    os.path.join(os.path.dirname(__file__), "..", "pcsx2", "ImGui", "FullscreenUI_Settings.cpp"),
]
def extract_strings_from_source(source_content):
    """Extract FSUI translation strings from source content.

    Scans for every FSUI_* translation macro invocation, parses the quoted
    string argument(s), and returns the unique translatable strings in order
    of first appearance.
    """
    strings = []
    for token in ["FSUI_STR", "FSUI_CSTR", "FSUI_FSTR", "FSUI_NSTR", "FSUI_VSTR", "FSUI_ICONSTR", "FSUI_ICONSTR_S"]:
        token_len = len(token)
        last_pos = 0
        while True:
            last_pos = source_content.find(token, last_pos)
            if last_pos < 0:
                break

            # Skip the macro's own "#define FSUI_..." line.
            if last_pos >= 8 and source_content[last_pos - 8:last_pos] == "#define ":
                last_pos += len(token)
                continue

            # Slicing (rather than indexing) avoids an IndexError when the
            # token happens to be the very last thing in the file.
            if source_content[last_pos + token_len:last_pos + token_len + 1] == '(':
                start_pos = last_pos + token_len + 1
                end_pos = source_content.find(")", start_pos)
                s = source_content[start_pos:end_pos]

                # Split into string arguments, removing the quotes.
                string_args = [""]
                arg = 0
                cpos = s.find(',')
                pos = s.find('"')
                while pos >= 0 or cpos >= 0:
                    assert pos == 0 or s[pos - 1] != '\\'
                    if cpos == -1 or pos < cpos:
                        epos = pos
                        while True:
                            epos = s.find('"', epos + 1)
                            # No closing quote: the ')' we stopped at was
                            # inside the string, so extend s to the next ')'.
                            if epos == -1:
                                end_pos = source_content.find(")", end_pos + 1)
                                s = source_content[start_pos:end_pos]
                                epos = pos
                                continue

                            if s[epos - 1] == '\\':
                                # Escaped quote: keep scanning for the real end.
                                continue
                            else:
                                break

                        assert epos > pos
                        string_args[arg] += s[pos + 1:epos]
                        cpos = s.find(',', epos + 1)
                        pos = s.find('"', epos + 1)
                    else:
                        # Non-string argument (e.g. an icon constant): start a
                        # new, empty argument slot.
                        arg += 1
                        string_args.append("")
                        cpos = s.find(',', cpos + 1)

                print(string_args)

                # FSUI_ICONSTR and FSUI_ICONSTR_S translate only the second
                # argument; the other macros take a single string argument.
                if len(string_args) >= 2:
                    new_s = string_args[1]
                else:
                    new_s = string_args[0]

                assert len(new_s) > 0
                if new_s not in strings:
                    strings.append(new_s)

            last_pos += len(token)

    return strings
def process_file(src_file):
    """Extract strings from one source file and rewrite its translation area.

    Returns the number of unique strings found, or 0 if the file has no
    translation string area markers.
    """
    print(f"\nProcessing: {src_file}")
    with open(src_file, "r") as f:
        source = f.read()

    start = source.find(START_IDENT)
    end = source.find(END_IDENT)
    if start < 0 or end <= start:
        print(f"  Warning: No translation string area found in {src_file}")
        return 0

    # Exclude the existing TRANSLATE_NOOP area itself so its contents are not
    # re-extracted as strings.
    source_without_area = source[:start] + source[end + len(END_IDENT):]
    strings = extract_strings_from_source(source_without_area)
    print(f"  Found {len(strings)} unique strings.")

    new_area = ""
    for string in strings:
        new_area += f"TRANSLATE_NOOP(\"FullscreenUI\", \"{string}\");\n"

    # Keep the begin/end markers; replace everything between them.
    new_source = source[:start + len(START_IDENT) + 1] + new_area + source[end:]
    with open(src_file, "w") as f:
        f.write(new_source)

    return len(strings)
# Rebuild the translation area in every configured source file and report
# the grand total.
total_strings = 0
for src_file in src_files:
    total_strings += process_file(src_file)

print(f"\nTotal: {total_strings} unique strings across all files.")