@brief This script preprocesses source files for use with Log_t
@copyright Copyright (c) 2022

This script works by first duplicating source files to the build folder. \n
Then it scans each file for calls to a Log function and modifies them as follows. \n

If the call has a string for the `LOG_TAG` parameter, give that string a unique integer ID and replace it with that integer. \n
If the ID is not a string, leave the variable alone. \n

Replace the call's `LOG_MSG` string with a unique ID as well. \n
NOTE: The `LOG_MSG` parameter must always be an inline string. \n

`LOG_TAG`s and `LOG_MSG`s do not share IDs. \n

Log(ID, "My Message"); -> Log(ID, 1);
Log("My Str ID", "My Message"); -> Log(1, 1);

Calls to Log functions also have the option to send a number with a third parameter. \n

Log("My Str ID", "My Message", 56); -> Log(1, 1, 56);
Log(ID, "My Message", A_Num_Var); -> Log(ID, 1, A_Num_Var);

Declarations of `LOG_TAG` also have their strings replaced with a unique ID. \n
NOTE: Definitions of `LOG_TAG`s must always be inline strings. \n

LOG_TAG TAG = "Logging Tag"; -> LOG_TAG TAG = 2;

A special case has been made to also allocate and replace strings that call the following macro,
where x is the string: it will be given a unique ID and replaced with said ID as if it were being called by a Logging function.
This is useful where one wishes to generate log functions using the C preprocessor.
Note, however, that if this script is not run the macro should still allow everything to compile normally, leaving the string untouched.
66from os.path
import join
as join_path
67from typing
import Callable
69from vs_conf
import Settings
78LIB_PATH = join_path(
"libraries",
"Log")
79LIBRARIES_NAME =
"libraries"
80WORKING_DIRECTORY_OFFSET = join_path(
"build",
"Pre_Build",
"")
81FILE_OUTPUT_PATH =
"log_lookup.json"
83BYPASS_SCRIPT = os.path.exists(
"script.disable")
87SOURCE_DEST_NAME = f
"{WORKING_DIRECTORY_OFFSET}{SOURCE_NAME}"
88LIBRARIES_DEST_NAME = f
"{WORKING_DIRECTORY_OFFSET}{LIBRARIES_NAME}"
90INCLUDED_FILE_TYPES = (
".c",
".cpp",
".h",
".hpp",
".t",
".tpp",
".s",
".def")
93async def ingest_files(progress_func: Callable[[
None],
None], entries: tuple[FileEntry]) ->
None:
94 for file
in entries[0]:
98 except Exception
as e:
99 file.newError(e,
"Thread Error", file.name)
102def run_ingest_files(progress_func: Callable[[
None],
None], *entries: FileEntry) ->
None:
103 asyncio.run(ingest_files(progress_func, entries))
106Files: set[FileEntry] = set()
112def allocate_files(path: str, offset: str) ->
None:
113 blacklist = Util.get_library_blacklist()
114 model: dict[str, str]
118 except json.JSONDecodeError:
119 sys.exit(Text.error(
"Error loading settings file, consider running the 'VS Setup' task"))
121 if model
in blacklist:
122 blacklist = blacklist[model]
126 for subdir, _, files
in os.walk(path):
128 for directory
in blacklist:
129 if str(subdir).startswith(directory):
130 Excluded_dirs.add(directory)
135 for filename
in files:
136 if pathlib.Path(filename).suffix.lower()
not in INCLUDED_FILE_TYPES:
138 filepath = join_path(subdir, filename)
139 rawpath = subdir + os.sep
140 if BYPASS_SCRIPT
or rawpath.startswith(LIB_PATH):
141 Util.sync_file(filepath, offset, rawpath, suppress=
True)
143 file_entry = FileEntry(rawpath, filepath, filename, offset)
144 Files.add(file_entry)
145 FileRefs.add(file_entry)
147 for directory
in blacklist:
148 rm_path = join_path(offset, directory)
149 if os.path.exists(rm_path):
150 shutil.rmtree(rm_path)
153def dole_files(count: int, progress_func: Callable[[
None],
None]) ->
None:
155 file_set: set[FileEntry] = set()
159 while len(Files) != 0
and i != count:
160 file_set.add(Files.pop())
163 if len(file_set) != 0:
164 Threads.add(threading.Thread(target=run_ingest_files, args=(progress_func, file_set)))
170def begin_scan() -> None:
177 IDMatch.clear_blanks()
180def printResults() -> None:
186 for dir
in Excluded_dirs:
188 extStr += f
" {dir}\n"
194 print(Text.header(
"\nExcluded Folders:"))
195 print(Text.yellow(extStr.strip(
"\n")))
197 print(Text.underline(Text.yellow(
" {} more folder{}".format(m,
"s" if m > 1
else ""))))
207 extStr += f
" {f.name}\n"
211 if len(f.errors) > 0:
215 print(Text.header(
"\nModified Files:"))
216 print(Text.green(extStr.strip(
"\n")))
218 print(Text.underline(Text.green(
" {} more file{}".format(m,
"s" if m > 1
else ""))))
236 print(Text.header(
"\nFile Errors:"))
237 print(extStr.strip(
"\n"))
240 print(Text.underline(Text.red(
" {} more error{}".format(m,
"s" if m > 1
else ""))))
245 Util.check_git_submodules(INCLUDED_FILE_TYPES)
247 Util.touch(SOURCE_DEST_NAME)
248 Util.touch(LIBRARIES_DEST_NAME)
250 allocate_files(SOURCE_NAME, WORKING_DIRECTORY_OFFSET)
251 allocate_files(LIBRARIES_NAME, WORKING_DIRECTORY_OFFSET)
253 if not BYPASS_SCRIPT:
256 print(Text.warning(f
"Available Ram: {Util.available_ram()} GBs\n"))
258 prehash = Util.hashFile(FILE_OUTPUT_PATH)
260 print(f
"Files to search: {len(FileRefs)}")
262 prog = ProgressBar(len(Files), Text.important(
"Completed Files:"))
264 dole_files(8, prog.progress)
266 print(f
"Threads to run: {len(Threads)}\n\n")
273 IDMatch.save_lookup(FILE_OUTPUT_PATH)
274 newhash = Util.hashFile(FILE_OUTPUT_PATH)
275 if Util.FILES_CHANGED:
276 print(Text.important(
"\nNote: Files have changed, rebuild inbound"))
277 if newhash != prehash:
278 print(Text.really_important(
"\nNote: Mapped values have changed"))
282 Util.encode_log_map(f
"{WORKING_DIRECTORY_OFFSET}{LIB_PATH}")
294if __name__ ==
"__main__":
# load_json() -> dict[str, str]
#     Load the settings JSON.
#     (stray documentation fragment; original context lost)