Latest lex_scan, script to copy to SME11, program to count references

2025-07-16 14:45:21 +01:00
parent 8c3a0529a3
commit 032c544c53
3 changed files with 680 additions and 150 deletions


@@ -4,6 +4,10 @@ import os
import re
import sys
import json
from pathlib import Path
from datetime import datetime
import glob
# Configure logger
@@ -23,7 +27,7 @@ file_handler.setFormatter(formatter)
# Console handler (WARNING and above)
console_handler = logging.StreamHandler()
-console_handler.setLevel(logging.WARNING)
console_handler.setLevel(logging.INFO)
console_handler.setFormatter(formatter)
# Add handlers to the logger
@@ -31,10 +35,12 @@ logger.addHandler(file_handler)
logger.addHandler(console_handler)
missing_files = []
themes = ["default", "AdminLTE"]
def validate_panel_name(panel_name):
if not panel_name[0].isupper():
logger.error(f"Error: Panel name \'{panel_name}\' must start with a capital letter.")
logger.error(f"Error: Panel name \'{panel_name}\' must start with a capital letter.")
sys.exit(1)
def get_full_base_path(system):
@@ -44,7 +50,7 @@ def check_controller_file_exists(system, panel):
full_base_path = get_full_base_path(system)
controller_path = os.path.join(full_base_path, "lib/SrvMngr/Controller", f"{panel}.pm")
if not os.path.exists(controller_path):
logger.error(f"Error: Controller file \'{controller_path}\' does not exist.")
logger.error(f"Error: Controller file \'{controller_path}\' does not exist.")
sys.exit(1)
return controller_path
@@ -88,9 +94,27 @@ def find_matching_files_variable_part(input_string, directory):
return matching_files
return []
def extract_string_for_exclusion(filename, prefix):
"""
Extracts the pattern _<prefix>_<alphan> (terminated by a dot) from the basename of the filename.
The prefix is passed as a variable. Returns the matched string or None if not found.
Logs when a string is added to the exclude list.
"""
#logger.info(f"extract:{prefix} {filename}")
base = os.path.basename(filename)
# Match: start, _<prefix>_, one or more alphanumerics, then a dot
pattern = rf'^_({re.escape(prefix)}_[a-zA-Z0-9]+)(?=\.)'
match = re.match(pattern, base)
if match:
result = match.group(1)
#logger.info(f"Returning '{result}' to exclude_list from file: {filename}")
return result
return None
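# Illustration (hypothetical filenames, assuming prefix "usr"):
#   extract_string_for_exclusion("_usr_status.html.ep", "usr")  -> "usr_status"
#   extract_string_for_exclusion("status.html.ep", "usr")       -> None
# i.e. only basenames of the form _<prefix>_<alnum>. qualify for the exclude list.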
def scan_application_files(system, panel, prefix, scan_general=False):
extracted_strings = {}
exclude_list = []
full_base_path = get_full_base_path(system)
@@ -105,7 +129,6 @@ def scan_application_files(system, panel, prefix, scan_general=False):
scan_file_for_lexical_strings(controller_custom_path, prefix, extracted_strings, scan_general)
# Template files
themes = ["default", "AdminLTE"]
for theme in themes:
template_base_path = os.path.join(full_base_path, "themes", theme, "templates")
if panel in ['Backup','Yum','Bugreport']:
@@ -114,7 +137,7 @@ def scan_application_files(system, panel, prefix, scan_general=False):
# print(f"Matching template files: {panel.lower()!r} -> Matches: {[os.path.basename(m) for m in template_files]}")
for file_path in template_files:
panel_template_path = os.path.join(template_base_path, f"{file_path}")
logger.warning(f"Scanning panel template file: {panel_template_path}")
logger.info(f"Scanning panel template file: {panel_template_path}")
scan_file_for_lexical_strings(panel_template_path, prefix, extracted_strings, scan_general)
else:
panel_template_path = os.path.join(template_base_path, f"{panel.lower()}.html.ep")
@@ -130,20 +153,33 @@ def scan_application_files(system, panel, prefix, scan_general=False):
partial_path = os.path.join(partials_dir, filename)
logger.info(f"Scanning partial template file: {partial_path}")
scan_file_for_lexical_strings(partial_path, prefix, extracted_strings, scan_general)
# and add the matched _<prefix>_<name> part to the exclude list
result = extract_string_for_exclusion(filename, prefix)
if result:
if result not in exclude_list:
logger.info(f"Adding {result}")
exclude_list.append(result)
# # Deduplicate lists of dicts in extracted_strings
# for key, value in extracted_strings.items():
# if isinstance(value, list) and value and isinstance(value[0], dict):
# # Deduplicate list of dicts using JSON serialization
# seen = set()
# deduped = []
# for d in value:
# ser = json.dumps(d, sort_keys=True)
# if ser not in seen:
# seen.add(ser)
# deduped.append(d)
# extracted_strings[key] = deduped
# And take out the excluded ones
# Assumes extracted_strings is a dict where values are lists of dicts or strings
if exclude_list:
logger.info(f"Found {len(exclude_list)} items in exclude list")
for key in list(extracted_strings.keys()):
if key in exclude_list:
del extracted_strings[key]
return extracted_strings
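# For reference, a minimal standalone sketch of the JSON-serialisation dedupe
# that is commented out above - dicts are unhashable, so each one is dumped
# with sort_keys=True to obtain a stable, hashable fingerprint:
#   records = [{"a": 1, "b": 2}, {"b": 2, "a": 1}, {"a": 3}]
#   seen, deduped = set(), []
#   for d in records:
#       ser = json.dumps(d, sort_keys=True)
#       if ser not in seen:
#           seen.add(ser)
#           deduped.append(d)
#   # deduped == [{"a": 1, "b": 2}, {"a": 3}] - key order is irrelevant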
def scan_file_for_lexical_strings(filepath, prefix, extracted_strings_dict, scan_general):
@@ -184,7 +220,7 @@ def scan_file_for_lexical_strings(filepath, prefix, extracted_strings_dict, scan
if filepath not in extracted_strings_dict[s]:
extracted_strings_dict[s].append(filepath)
else:
logger.error(f"Unexpected chars ({s}) found in {filepath}")
logger.error(f"Unexpected chars ({s}) found in {filepath}")
continue
else:
pattern = re.compile(
@@ -204,37 +240,48 @@ def scan_file_for_lexical_strings(filepath, prefix, extracted_strings_dict, scan
if filepath not in extracted_strings_dict[s]:
extracted_strings_dict[s].append(filepath)
else:
logger.error(f"Unexpected chars ({s}) found in {filepath}")
logger.error(f"Unexpected chars ({s}) found in {filepath}")
continue
def read_lex_file(filepath):
logger.info(f"Reading file: {filepath}")
lex_data = {}
with open(filepath, 'r', encoding='utf-8') as f:
content = f.read()
# Improved regex: handles single/double quotes and escaped quotes in value
pattern = r"""
(['"])(.*?)\1 # key in quotes
\s*=>\s*
(['"])((?:\\.|(?!\3).)*)\3 # value in quotes, allowing escaped chars
"""
matches = re.findall(pattern, content, re.DOTALL | re.VERBOSE)
for _, key, quote, value in matches:
# Unescape the quote character and backslashes in value
value = value.replace(f"\\{quote}", quote).replace("\\\\", "\\")
lex_data[key] = value
return lex_data
logger.info(f"Reading file: {filepath}")
lex_data = {}
if not os.path.exists(filepath):
logger.warning(f"⚠️ File does not exist: {filepath}. Returning empty dictionary.")
return lex_data
with open(filepath, 'r', encoding='utf-8') as f:
content = f.read()
# Improved regex: handles single/double quotes and escaped quotes in value
pattern = r"""
(['"])(.*?)\1 # key in quotes
\s*=>\s*
(['"])((?:\\.|(?!\3).)*)\3 # value in quotes, allowing escaped chars
"""
matches = re.findall(pattern, content, re.DOTALL | re.VERBOSE)
for _, key, quote, value in matches:
# Unescape the quote character and backslashes in value
value = value.replace(f"\\{quote}", quote).replace("\\\\", "\\")
lex_data[key] = value
return lex_data
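# Example of the lex syntax the regex accepts (hypothetical keys):
#   'usr_FIRSTPAGE_DESC' => 'Manage user accounts',
#   'usr_IT_WORKS' => 'It\'s working',
# read_lex_file() would return:
#   {'usr_FIRSTPAGE_DESC': 'Manage user accounts', 'usr_IT_WORKS': "It's working"}
# Keys/values may use single or double quotes; escaped quotes in the value
# are unescaped before the pair is stored.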
def write_lex_file(filepath, lex_data):
"""
Writes a dictionary to a lex file, sorted alphabetically by key (case-insensitive).
Adds a header with the current date and time.
"""
# Sort the dictionary by key, case-insensitive
sorted_items = sorted(lex_data.items(), key=lambda item: item[0].lower())
now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
# Extract panel name using regex
match = re.search(r'.*/output/([^/]+)/', filepath)
panel_name = match.group(1) if match else 'Unknown'
logger.info(f"{filepath} {panel_name}")
header = f"#\n# Lex file for {panel_name} generated on {now}\n#\n"
with open(filepath, 'w', encoding='utf-8') as f:
f.write(header)
for key, value in sorted_items:
value = value.replace("'",'"')
f.write(f"'{key}' => '{value}',{os.linesep}")
value = value.replace("'", '"')
f.write(f"'{key}' => '{value}',{os.linesep}")
def read_languages_json(filepath):
@@ -245,18 +292,61 @@ def read_languages_json(filepath):
languages = json.load(f)
return languages
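# Assumed shape of Templates/languages.json - a list of language descriptors
# in which only the "code" key is consumed by the callers below:
#   [{"code": "en"}, {"code": "fr"}, {"code": "de"}]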
-def update_file_with_new_lexical_string(filepath, old_string, new_string):
-try:
-with open(filepath, 'r') as f:
-content = f.read()
-new_content = content.replace(old_string, new_string)
-with open(filepath, 'w') as f:
-f.write(new_content)
-#map any single quotes to double
-logger.info(f"Updated \'{old_string}\' to \'{new_string}\' in file: {filepath}")
-except Exception as e:
-logger.error(f"Error updating file {filepath}: {e}")
def convert_single_to_double_quotes(text):
"""
Replace all strings in single quotes with double quotes,
while preserving single-quoted apostrophes (e.g., escaped or internal).
Example: 'It\\'s fine' → "It's fine"
"""
def replacer(match):
content = match.group(1)
# Unescape escaped single quotes to real single quotes
content = content.replace("\\'", "'")
# Escape any existing double quotes inside content
content = content.replace('"', r'\"')
return f'"{content}"'
# Regex explanation:
# '((?:\\'|[^'])*?)' : capture the content between single quotes,
# allowing escaped single quotes (\') inside
pattern = r"'((?:\\'|[^'])*?)'"
return re.sub(pattern, replacer, text)
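# Illustrative conversions (input shown as raw text, not Python literals):
#   'Save'          ->  "Save"
#   'It\'s fine'    ->  "It's fine"       (escaped apostrophe restored)
#   'he said "hi"'  ->  "he said \"hi\""  (inner double quotes escaped)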
def update_file_with_new_lexical_string(filepath, old_string, new_string, filepath_new=None):
"""
Update occurrences of old_string with new_string in the file at filepath,
and write the result to filepath_new (defaults to filepath).
"""
if filepath_new is None:
filepath_new = filepath
try:
with open(filepath, 'r', encoding='utf-8') as f:
content = f.read()
if old_string not in content:
logger.warning(f"⚠️ No occurrences of '{old_string}' found in {filepath}")
return False
new_content = content.replace(old_string, new_string)
if old_string in new_content:
logger.warning(f"⚠️ Still occurrences of '{old_string}' found in {filepath}")
return False
# Optionally, map any single quotes to double quotes in the result
new_content = convert_single_to_double_quotes(new_content)
with open(filepath_new, 'w', encoding='utf-8') as f:
f.write(new_content)
# ✅ Read back and verify
with open(filepath_new, 'r', encoding='utf-8') as f:
saved_output = f.read()
if new_string in saved_output and old_string not in saved_output:
logger.info(f"✅ Successfully replaced and verified '{old_string}''{new_string}' in {filepath_new}")
return True
else:
logger.error(f"❌ Replacement failed to appear in written file {filepath_new}")
return False
#logger.info(f"Updated '{old_string}' to '{new_string}' in file: {filepath_new}")
except Exception as e:
logger.error(f"❌Error updating file {filepath} (writing to {filepath_new}): {e}")
def export_sorted_missing_lex(input_file1, input_file2, output_file):
"""
@@ -280,13 +370,71 @@ def export_sorted_missing_lex(input_file1, input_file2, output_file):
#for k in sorted_missing_keys:
# print(f"'{k}' => '{dict1[k]}',")
def get_new_filepath(filepath, panel_lex_output_dir, themes):
"""
Constructs a new file path in panel_lex_output_dir:
- Adds `.new` before the extension.
- If any theme from `themes` appears in the filepath,
the file is placed inside a subdirectory named after the theme
under panel_lex_output_dir.
Args:
filepath (str): Original file path.
panel_lex_output_dir (str): Output directory path.
themes (list of str): Theme names to scan for in filepath.
Returns:
str: The constructed new file path.
"""
original = Path(filepath)
new_name = original.stem + '.new' + original.suffix
theme_subdir = None
for theme in themes:
if theme in filepath:
theme_subdir = theme
break
output_dir = Path(panel_lex_output_dir)
if theme_subdir:
output_dir = output_dir / theme_subdir
# Ensure directory exists
os.makedirs(output_dir, exist_ok=True)
filepath_new = output_dir / new_name
return str(filepath_new)
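# Path construction examples (hypothetical inputs, themes as defined above):
#   get_new_filepath("themes/AdminLTE/templates/usr.html.ep", "output/Usr", themes)
#     -> "output/Usr/AdminLTE/usr.html.new.ep"
#   get_new_filepath("lib/SrvMngr/Controller/Usr.pm", "output/Usr", themes)
#     -> "output/Usr/Usr.new.pm"  (no theme in the path, so no subdirectory)
# Note: the output directory is created as a side effect (os.makedirs above).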
def delete_lex_output_files(panel_lex_output_dir):
"""
Recursively deletes all .html.new.ep and .new.pm files in the given directory.
Returns a list of deleted file paths.
"""
patterns = ['**/*.html.new.ep', '**/*.new.pm']
deleted_files = []
for pattern in patterns:
# Use glob with recursive=True
files = glob.glob(os.path.join(panel_lex_output_dir, pattern), recursive=True)
for file_path in files:
if os.path.isfile(file_path):
try:
os.remove(file_path)
deleted_files.append(file_path)
except Exception as e:
print(f"Error deleting {file_path}: {e}")
return deleted_files
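# Example (hypothetical tree): with output/Usr/AdminLTE/usr.html.new.ep and
# output/Usr/Usr.new.pm present, delete_lex_output_files("output/Usr")
# removes both and returns their paths; other files (e.g. *.lex.new) remain.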
def main():
parser = argparse.ArgumentParser(description="Scan Mojolicious application files for lexical strings.")
parser = argparse.ArgumentParser(description="Scan and audit Mojolicious application files for lexical strings.")
parser.add_argument("-p", "--panel", required=True, help="Name of the Mojolicious panel (e.g., MyPanel).")
parser.add_argument("-s", "--system", default="SME11", help="System name (default: SME11).")
parser.add_argument("-e", "--edit", action="store_true", help="Enable editing of original files (default: False).")
parser.add_argument("-a", "--audit", action="store_true", help="Enable audit of all strings (default: False).")
parser.add_argument("-e", "--edit", action="store_true", help="Enable audit of single words (default: False).")
parser.add_argument("-l", "--lang", action="store_true", help="Enable other language processing (default: False).")
args = parser.parse_args()
@@ -294,8 +442,10 @@ def main():
system = args.system
edit_files = args.edit
do_lang = args.lang
do_audit = args.audit
logger.info(f"Lex scan for panel: {panel}, system: {system} audit: {do_audit} One word audit: {edit_files} Other lang: {do_lang}")
logger.warning(f"Lex scan for panel: {panel}, system: {system} edit: {edit_files} lang: {do_lang}\n")
validate_panel_name(panel)
controller_path = check_controller_file_exists(system, panel)
@@ -304,46 +454,137 @@ def main():
if prefix:
logger.info(f"Scanning application files for strings with prefix \'{prefix}\'...")
extracted_panel_strings = scan_application_files(system, panel, prefix)
logger.info(f"Deduplicated extracted panel strings: {len(extracted_panel_strings)} unique strings found.")
# Process panel-specific English lexical file
# Output to current working directory
panel_lex_output_dir = os.path.join(os.getcwd(), "output", panel.capitalize())
os.makedirs(panel_lex_output_dir, exist_ok=True)
logger.info(f"Deduplicated extracted panel strings: {len(extracted_panel_strings)} unique strings found")
full_base_path = get_full_base_path(system)
panel_lex_output_dir = os.path.join(os.getcwd(), "output", panel.capitalize())
general_lex_output_dir = os.path.join(os.getcwd(), "output", "General")
# Corrected capitalization for panel in path
en_lex_path = os.path.join(full_base_path, "lib/SrvMngr/I18N/Modules", panel, f"{panel.lower()}_en.lex.bak")
en_lex_new_path = os.path.join(panel_lex_output_dir, f"{panel.lower()}_en.lex.new")
if do_audit:
# Process panel-specific English lexical file
# Output to current working directory
os.makedirs(panel_lex_output_dir, exist_ok=True)
en_lex_data = read_lex_file(en_lex_path)
logger.info(f"Original English lex file lines: {len(en_lex_data)}")
new_en_lex_data = {}
for lex_string in extracted_panel_strings.keys():
if lex_string in en_lex_data:
new_en_lex_data[lex_string] = en_lex_data[lex_string]
else:
# Fall back: build the value from the key itself - strip the prefix, replace
# underscores with spaces, and sentence-case the result; this often yields a
# reasonable message derived from the lex string id.
sometext = lex_string.replace(f"{prefix}_", "").replace("_", " ")
# Split into words
words = sometext.split()
# Lowercase all words, capitalize the first
if words:
words = [words[0].capitalize()] + [w.lower() for w in words[1:]]
sometext = ' '.join(words)
new_en_lex_data[lex_string] = sometext
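# e.g. with prefix "usr", a missing key "usr_ACCOUNT_NAME" receives the
# generated fallback text "Account name"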
write_lex_file(en_lex_new_path, new_en_lex_data)
logger.info(f"Generated {en_lex_new_path}. Lines in new file: {len(new_en_lex_data)}, Lines in original file: {len(en_lex_data)}")
#Create file of the ones not in the new lex file
output_diff_file = os.path.join(panel_lex_output_dir, f"{panel.lower()}_en.lex.diff")
export_sorted_missing_lex(en_lex_path, en_lex_new_path, output_diff_file)
logger.info("Scanning application files for general lexical strings...")
extracted_general_strings = scan_application_files(system, panel, prefix, scan_general=True)
logger.info(f"Deduplicated extracted general strings: {len(extracted_general_strings)} unique strings found.")
os.makedirs(general_lex_output_dir, exist_ok=True)
general_en_lex_path_orig = os.path.join(full_base_path, "lib/SrvMngr/I18N/Modules", "General", "general_en.lex.bak")
general_en_lex_new_path = os.path.join(general_lex_output_dir, "general_en.lex.new")
general_en_lex_data_orig = read_lex_file(general_en_lex_path_orig)
logger.info(f"Original general English lex file lines: {len(general_en_lex_data_orig)}")
new_general_en_lex_data = read_lex_file(general_en_lex_new_path)
for lex_string in extracted_general_strings.keys():
if lex_string in general_en_lex_data_orig:
new_general_en_lex_data[lex_string] = general_en_lex_data_orig[lex_string]
else:
sometext = lex_string.replace("_", " ")
sometext = sometext.replace("'",'"')
# Split into words
words = sometext.split()
# Lowercase all words, capitalize the first
if words:
words = [words[0].capitalize()] + [w.lower() for w in words[1:]]
sometext = ' '.join(words)
new_general_en_lex_data[lex_string] = sometext
write_lex_file(general_en_lex_new_path, new_general_en_lex_data)
logger.info(f"Generated {general_en_lex_new_path}. Lines in new file: {len(new_general_en_lex_data)}, Lines in original file: {len(general_en_lex_data_orig)}")
logger.info("")
if edit_files:
logger.info("Handling single-word lexical strings...")
# Paths for original and new lex files
en_lex_path = os.path.join(full_base_path, "lib/SrvMngr/I18N/Modules", panel, f"{panel.lower()}_en.lex")
en_lex_new_path = os.path.join(panel_lex_output_dir, f"{panel.lower()}_en.lex.new1")
en_lex_data = read_lex_file(en_lex_path)
logger.info(f"Original English panel specific lex file lines: {len(en_lex_data)}")
general_en_lex_path_orig = os.path.join(general_lex_output_dir, "general_en.lex.new")
general_en_lex_data_orig = read_lex_file(general_en_lex_path_orig)
general_en_lex_new_path = os.path.join(general_lex_output_dir, "general_en.lex.new1")
new_general_en_lex_data = read_lex_file(general_en_lex_new_path) or general_en_lex_data_orig
logger.info(f"General English general lex file lines: {len(general_en_lex_data_orig)}")
# Delete temp .html.new.ep / .new.pm files
delete_lex_output_files(panel_lex_output_dir)
# Find one-word entries based on the string value, not key
for lex_string, filepaths in extracted_panel_strings.items():
# Check if the lex_string exists in panel lex data
if lex_string in en_lex_data:
actual_string = en_lex_data[lex_string]
# Look for clean, single-word values (e.g. "Save" or "DeleteMe")
if actual_string.isalnum():
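# e.g. "Save".isalnum() -> True (one clean word, a promotion candidate);
# "Save changes".isalnum() -> False (space), as is "Re-try" (hyphen)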
just_one_word = actual_string
# Move it to the general lex file if it's not there
if just_one_word not in new_general_en_lex_data:
new_general_en_lex_data[just_one_word] = just_one_word
logger.info(f"Added '{just_one_word}' to general lex: {general_en_lex_new_path}")
# Update source files that refer to this lex string
for filepath in filepaths:
# Compute a themed output filepath
filepath_new = get_new_filepath(filepath, panel_lex_output_dir, themes)
# Use existing modified version if available
filepath_old = filepath_new if os.path.isfile(filepath_new) else filepath
logger.info(f"Changing {lex_string} to {just_one_word} in file {filepath_old}{filepath_new}")
# Replace old lex_string with the actual string value
update_file_with_new_lexical_string(filepath_old, lex_string, just_one_word, filepath_new)
# Remove the entry from the panel lex file
en_lex_data.pop(lex_string)
# Write updated lex files
write_lex_file(general_en_lex_new_path, new_general_en_lex_data)
write_lex_file(en_lex_new_path, en_lex_data)
logger.info(f"New General English general lex file lines: {len(new_general_en_lex_data)} written to {general_en_lex_new_path}")
logger.info(f"New English panel-specific lex file lines: {len(en_lex_data)} written to {en_lex_new_path}")
logger.info("")
if do_lang:
# Panel specific lex files
languages_json_path = os.path.join(".", "Templates", "languages.json") # Corrected path
languages = read_languages_json(languages_json_path)
@@ -370,39 +611,7 @@ def main():
write_lex_file(lang_lex_new_path, new_lang_lex_data)
logger.info(f"Generated {lang_lex_new_path}. Lines in new file: {len(new_lang_lex_data)}, Lines in original file: {len(lang_lex_data)}")
logger.info("")
logger.info("Scanning application files for general lexical strings...")
extracted_general_strings = scan_application_files(system, panel, prefix, scan_general=True)
logger.info(f"Deduplicated extracted general strings: {len(extracted_general_strings)} unique strings found.")
general_lex_output_dir = os.path.join(os.getcwd(), "output", "General")
os.makedirs(general_lex_output_dir, exist_ok=True)
general_en_lex_path_orig = os.path.join(full_base_path, "lib/SrvMngr/I18N/Modules", "General", "general_en.lex.bak")
general_en_lex_new_path = os.path.join(general_lex_output_dir, "general_en.lex.new")
general_en_lex_data_orig = read_lex_file(general_en_lex_path_orig)
logger.info(f"Original general English lex file lines: {len(general_en_lex_data_orig)}")
new_general_en_lex_data = read_lex_file(general_en_lex_new_path)
for lex_string in extracted_general_strings.keys():
if lex_string in general_en_lex_data_orig:
new_general_en_lex_data[lex_string] = general_en_lex_data_orig[lex_string]
else:
sometext = lex_string.replace("_", " ")
sometext = sometext.replace("'",'"')
# Split into words
words = sometext.split()
# Lowercase all words, capitalize the first
if words:
words = [words[0].capitalize()] + [w.lower() for w in words[1:]]
sometext = ' '.join(words)
new_general_en_lex_data[lex_string] = sometext
write_lex_file(general_en_lex_new_path, new_general_en_lex_data)
logger.info(f"Generated {general_en_lex_new_path}. Lines in new file: {len(new_general_en_lex_data)}, Lines in original file: {len(general_en_lex_data_orig)}")
logger.info("")
if do_lang:
# General lex file for each language
for lang_entry in languages:
lang_code = lang_entry["code"]
if lang_code == "en":
@@ -425,36 +634,6 @@ def main():
write_lex_file(general_lang_lex_new_path, new_general_lang_lex_data)
logger.info(f"Generated {general_lang_lex_new_path}. Lines in new file: {len(new_general_lang_lex_data)}, Lines in original file: {len(general_lang_lex_data)}")
logger.info("")
-if edit_files:
-logger.info("Handling single-word lexical strings...")
-for lex_string, filepaths in extracted_panel_strings.items():
-if lex_string.startswith(f"{prefix}_"):
-sometext_part = lex_string[len(prefix) + 1:]
-if "_" not in sometext_part:
-just_one_word = sometext_part
-if just_one_word not in new_general_en_lex_data:
-new_general_en_lex_data[just_one_word] = just_one_word
-logger.info(f"Added \'{just_one_word}\' to {general_en_lex_new_path}")
-write_lex_file(general_en_lex_new_path, new_general_en_lex_data)
-for lang_entry in languages:
-lang_code = lang_entry["code"]
-if lang_code == "en":
-continue
-general_lang_lex_path = os.path.join(full_base_path, "lib/SrvMngr/I18N/Modules", "General", f"general_{lang_code}.lex")
-general_lang_lex_new_path = os.path.join(general_lex_output_dir, f"general_{lang_code}.lex.new")
-current_general_lang_lex_data = read_lex_file(general_lang_lex_new_path)
-if just_one_word not in current_general_lang_lex_data:
-current_general_lang_lex_data[just_one_word] = just_one_word
-write_lex_file(general_lang_lex_new_path, current_general_lang_lex_data)
-logger.info(f"Added \'{just_one_word}\' to {general_lang_lex_new_path}")
-for filepath in filepaths:
-update_file_with_new_lexical_string(filepath, lex_string, just_one_word)
else:
logger.error("Could not determine prefix, exiting.")
sys.exit(1)
@@ -462,7 +641,7 @@ def main():
if missing_files:
logger.warning("The following files were not found:")
for f in missing_files:
logger.warning(f"- {f}")
logger.warning(f"⚠️ - {f}")
if __name__ == "__main__":
main()