Initial Commit
.gitignore (vendored), new file, 212 lines
@@ -0,0 +1,212 @@
# Copied from https://github.com/github/gitignore/blob/main/Python.gitignore

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[codz]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py.cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
#poetry.toml

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
#pdm.lock
#pdm.toml
.pdm-python
.pdm-build/

# pixi
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
#pixi.lock
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
# in the .venv directory. It is recommended not to include this directory in version control.
.pixi

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.envrc
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

# Abstra
# Abstra is an AI-powered process automation framework.
# Ignore directories containing user credentials, local state, and settings.
# Learn more at https://abstra.io/docs
.abstra/

# Visual Studio Code
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
# and can be added to the global gitignore or merged into this file. However, if you prefer,
# you could uncomment the following to ignore the entire vscode folder
# .vscode/

# Ruff stuff:
.ruff_cache/

# PyPI configuration file
.pypirc

# Cursor
# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
# refer to https://docs.cursor.com/context/ignore-files
.cursorignore
.cursorindexingignore

# Marimo
marimo/_static/
marimo/_lsp/
__marimo__/

# Streamlit
.streamlit/secrets.toml
README.MD, new file, 95 lines
@@ -0,0 +1,95 @@
# C++ Build Module in Python

This Python module builds C++ projects without CMake, aiming to be:

* **Fast**
* **Automatic**
* **Cool**

> **Why?** Because I'm just not a huge fan of CMake.

-----

## Setup

For this module to work without any modifications, your project needs to follow a specific directory structure (a minimal example layout is sketched after this list):

* **`src/`**: Contains your source code files.
* **`include/`**: Holds your header files.
* **`build/`**: Where the output binaries will be placed.
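
For example, with the default settings a project might be laid out as follows; the C++ file names are purely illustrative, and `build/` is created automatically on the first run if it does not exist:

```
rpg-game/
├── builds.json
├── compile/        (this module, cloned into the project)
├── include/
│   └── player.h
├── src/
│   ├── main.cpp
│   └── player.cpp
└── build/
```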

Additionally, you'll need a configuration file named **`builds.json`**. Here's an example:

```json
{
    "configurations": [
        {
            "name": "debug",
            "build_type": "DEBUG",
            "platform": "LINUX_x86_64",
            "args": []
        }
    ],
    "global_build_args": []
}
```

-----

## Supported `builds.json` Values

Here's a breakdown of the supported values within your `builds.json` file; a fuller example configuration follows the list:

* **`"name"`**: Specifies the name of the resulting output binary.
* **`"build_type"`**: Controls the compiler flags used for the build.
    * **Supported values**: `DEBUG`, `TRACE`, `RELEASE`, `PROFILE`, `RELWITHDEBINFO`, `FASTEST`
* **`"platform"`**: Defines the target platform and architecture for compilation.
    * **Supported values**: `LINUX_x86_64`, `LINUX_x86`, `LINUX_x86_64_V4`, `MACOS` (untested)
* **`"args"`**: A list of strings containing compiler flags and instructions specific to a particular build.
* **`"global_build_args"`**: A list of strings containing compiler flags and instructions that are applied to *all* builds.

-----

## Usage

This project is designed to be portable (kinda):

- Clone this module into a C++ project that follows the supported layout.
- Run it with `python compile` (the example below assumes the module was cloned into a directory named `compile`).

### Example Usage

```bash
oscarg@ws01:~/Development/rpg-game$ ls
build  builds.json  compile  include  src
oscarg@ws01:~/Development/rpg-game$ cat builds.json
{
    "configurations": [
        {
            "name": "debug",
            "build_type": "DEBUG",
            "platform": "LINUX_x86_64",
            "args": []
        },
        {
            "name": "trace",
            "build_type": "TRACE",
            "platform": "LINUX_x86_64",
            "args": []
        }
    ],
    "global_build_args": ["-lSDL3", "-lSDL3_image", "-lm"]
}oscarg@ws01:~/Development/rpg-game$ python compile
[CppBuilder] 2025-07-07 14:00:37 - DEBUG - 'debug' config loaded from file.
[CppBuilder] 2025-07-07 14:00:37 - DEBUG - 'trace' config loaded from file.
[CppBuilder] 2025-07-07 14:00:37 - INFO - Configurations successfully loaded from 'builds.json'.
[CppBuilder] 2025-07-07 14:00:37 - INFO - Starting builds...
[MultiprocessWorker] 2025-07-07 14:00:37 - DEBUG - Adding task with callable _build_worker
[MultiprocessWorker] 2025-07-07 14:00:37 - DEBUG - Adding task with callable _build_worker
[MultiprocessWorker] 2025-07-07 14:00:37 - DEBUG - Starting with 2 tasks and max 4 processes.
[MultiprocessWorker] 2025-07-07 14:00:37 - DEBUG - All multiprocessing tasks completed.
[CppBuilder] 2025-07-07 14:00:37 - INFO - SUCCESS: Compiled 'build/trace_linux_x86-64'
[CppBuilder] 2025-07-07 14:00:37 - INFO - SUCCESS: Compiled 'build/debug_linux_x86-64'
oscarg@ws01:~/Development/rpg-game$ ls build/
debug_linux_x86-64  res  trace_linux_x86-64
oscarg@ws01:~/Development/rpg-game$
```
__main__.py, new file, 9 lines
@@ -0,0 +1,9 @@
from util.builder.cpp_builder import CppBuilder

def main():
    builder = CppBuilder()
    builder.load_config("builds.json")
    builder.build()

if __name__ == "__main__":
    main()
util/builder/cpp_builder.py, new file, 122 lines
@@ -0,0 +1,122 @@
from util.generic.multithreader import MultiprocessWorker, WorkerProcess, WorkerReturn, Spinners
from util.generic.log import Log
from util.builder.info import Build, BuildType, Platform

from typing import List, Optional
import subprocess
import os
import json


class CppBuilder:
    def __init__(self, builds: Optional[List[Build]] = None, log: Optional[Log] = None,
                 build_dir: str = "build/", source_dir: str = ".", include_dir: str = "include/"):
        if log is None:
            log = Log(self.__class__.__name__, Log.Level.DEBUG)
        self.logger = log.create_logger()

        # Avoid a mutable default argument: every builder gets its own build list.
        self._builds: List[Build] = builds if builds is not None else []
        self._build_dir: str = build_dir
        self._source_dir: str = source_dir
        self._include_dir: str = include_dir

        self.multiprocesser = MultiprocessWorker(spinner_set=Spinners.SPIN_OPEN_CUBE)

    def find_source_files(self, root_dir: str) -> List[str]:
        # Paths are returned relative to root_dir, so the builder is expected to run
        # from the project root (the default source_dir is ".").
        cpp_files = []
        for root, _, files in os.walk(root_dir):
            for file in files:
                if file.endswith('.c') or file.endswith('.cpp'):
                    relative_path = os.path.relpath(os.path.join(root, file), root_dir)
                    cpp_files.append(relative_path)
        return cpp_files

    def build(self):
        if len(self._builds) > 1:
            self.logger.log(Log.Level.INFO, "Starting builds...")
        else:
            self.logger.log(Log.Level.INFO, "Starting build...")

        os.makedirs(self._build_dir, exist_ok=True)

        for build in self._builds:
            # Assemble the compiler invocation: output path, target architecture,
            # include directory, and every source file found under source_dir.
            instruction = [
                build.platform.value.compiler,
                "-o", os.path.join(self._build_dir, f"{build.name}_{build.platform.value}"),
                f"-march={build.platform.value.architecture}",
                "-I", self._include_dir,
                *self.find_source_files(self._source_dir)
            ]

            instruction.append(build.platform.value.code_specification)

            instruction.extend(build.type.value)
            instruction.extend(build.additional_instructions)

            self.multiprocesser.add_task(WorkerProcess(CppBuilder._build_worker, instruction))

        results: List[WorkerReturn] = self.multiprocesser.run()

        for res in results:
            res.output_result(self.logger, r'-o (\S+)', "SUCCESS: Compiled '{output}'")

    @staticmethod
    def _build_worker(instruction):
        try:
            process = subprocess.run(instruction, capture_output=True, text=True, check=True)
            return WorkerReturn(True, ' '.join(instruction), process.stdout, process.stderr, None)
        except subprocess.CalledProcessError as e:
            return WorkerReturn(False, ' '.join(instruction), e.stdout, e.stderr, str(e))
        except Exception as e:
            return WorkerReturn(False, ' '.join(instruction), "", "", str(e))

    def load_config(self, file_path: str):
        try:
            with open(file_path, "r") as file:
                config = json.load(file)
        except FileNotFoundError:
            self.logger.log(Log.Level.ERROR, f"File '{file_path}' not found.")
            return False
        except json.JSONDecodeError:
            self.logger.log(Log.Level.ERROR, f"File '{file_path}' does not contain valid JSON data.")
            return False
        except Exception as e:
            self.logger.log(Log.Level.ERROR, f"An unexpected error occurred: {e}")
            return False

        if config is None:
            return False

        configurations = config["configurations"]
        global_build_args = config["global_build_args"]

        for i in configurations:
            build_name = i["name"]
            build_type_str = i["build_type"]
            platform_str = i["platform"]

            if build_type_str not in BuildType.__members__:
                self.logger.log(
                    Log.Level.ERROR,
                    f"Invalid build_type '{build_type_str}' for build '{build_name}'. Skipping..."
                )
                continue

            if platform_str not in Platform.__members__:
                self.logger.log(
                    Log.Level.ERROR,
                    f"Invalid platform '{platform_str}' for build '{build_name}'. Skipping..."
                )
                continue

            build_type = BuildType[build_type_str]
            platform = Platform[platform_str]

            self._builds.append(Build(
                build_name,
                build_type,
                platform,
                i["args"] + global_build_args
            ))
            self.logger.log(Log.Level.DEBUG, f"'{build_name}' config loaded from file.")

        self.logger.log(Log.Level.INFO, f"Configurations successfully loaded from '{file_path}'.")
        return True
util/builder/info.py, new file, 36 lines
@@ -0,0 +1,36 @@
from dataclasses import dataclass, field
from enum import Enum
from typing import List


class BuildType(Enum):
    # Each member's value is the list of compiler flags for that build type.
    DEBUG = ["-g", "-O0", "-DLOG_LEVEL=DEBUG"]
    TRACE = ["-g", "-O0", "-DLOG_LEVEL=TRACE"]
    RELEASE = ["-O3", "-DREL_BUILD", "-DLOG_LEVEL=ERROR"]
    PROFILE = ["-O2", "-g", "-pg", "-DLOG_LEVEL=ERROR"]
    RELWITHDEBINFO = ["-O2", "-g", "-DLOG_LEVEL=DEBUG"]
    FASTEST = ["-Ofast", "-ffast-math", "-xHost", "-DLOG_LEVEL=ERROR"]


@dataclass
class PlatformInfo:
    name: str
    compiler: str
    architecture: str
    file_type: str
    code_specification: str

    def __str__(self):
        return f"{self.name}_{self.architecture}{self.file_type}"


class Platform(Enum):
    LINUX_x86_64 = PlatformInfo("linux", "clang", "x86-64", "", "-DSPEC_LINUX")
    LINUX_x86 = PlatformInfo("linux", "clang", "x86", "", "-DSPEC_LINUX")
    LINUX_x86_64_V4 = PlatformInfo("linux", "clang", "x86-64-v4", "", "-DSPEC_LINUX")
    MACOS = PlatformInfo("macos", "gcc", "arm", "", "-DSPEC_DARWIN")  # untested


@dataclass
class Build:
    name: str
    type: BuildType
    platform: Platform
    additional_instructions: List[str] = field(default_factory=list)
util/generic/log.py, new file, 37 lines
@@ -0,0 +1,37 @@
from dataclasses import dataclass
from logging.handlers import RotatingFileHandler
from enum import IntEnum
from typing import Optional
import logging


@dataclass
class Log:
    class Level(IntEnum):
        # Mirrors the standard logging levels so members can be passed straight to logging calls.
        CRITICAL = 50
        FATAL = CRITICAL
        ERROR = 40
        WARNING = 30
        WARN = WARNING
        INFO = 20
        DEBUG = 10
        NOTSET = 0

    name: str
    level: Level
    file: Optional[str] = None
    format: str = '[%(name)s] %(asctime)s - %(levelname)s - %(message)s'
    date_format: str = '%Y-%m-%d %H:%M:%S'

    def create_logger(self):
        logger = logging.getLogger(self.name)
        logger.setLevel(self.level)
        formatter = logging.Formatter(self.format, self.date_format)
        self._add_handler(logger, logging.StreamHandler(), formatter)
        if self.file:
            self._add_handler(logger, RotatingFileHandler(self.file, maxBytes=1000000, backupCount=3), formatter)
        return logger

    def _add_handler(self, logger: logging.Logger, handler: logging.Handler, formatter: logging.Formatter):
        handler.setLevel(self.level)
        handler.setFormatter(formatter)
        logger.addHandler(handler)
util/generic/multithreader.py, new file, 129 lines
@@ -0,0 +1,129 @@
from util.generic.log import Log

from dataclasses import dataclass
from enum import Enum
import sys
import time
from datetime import datetime
from multiprocessing import Pool, Manager
import threading
from logging import Logger
from typing import Callable, Optional
import re


@dataclass
class Spinner:
    char_list: list[str]
    delay: float


class Spinners(Enum):
    BASIC = Spinner(["|", "/", "-", "\\"], 0.1)
    SPIN_TRI_BLOCK = Spinner(["▙", "▛", "▜", "▟"], 0.1)
    SPIN_RIGHT_ANGLE = Spinner(["🮤", "🮧", "🮥", "🮦"], 0.1)
    SPIN_OPEN_CUBE = Spinner(["🮪", "🮫", "🮭", "🮬"], 0.1)
    SPIN_DOTS = Spinner(["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"], 0.1)
    GUY_DANCING = Spinner(["🯇", "🯈"], 0.3)
    GUY_JUMPING = Spinner(["🯅", "🯆"], 0.3)
    SHIFTING_PATTERN = Spinner(["🮕", "🮖"], 0.1)
    SHIFTING_GRADIENT = Spinner(["░▒▓█▓▒░", "▒▓█▓▒░░", "▓█▓▒░▒▓", "█▓▒░▒▓█", "▓▒░▒▓█▓", "░░▒▓█▓▒"], 0.1)


@dataclass
class WorkerProcess:
    function: Callable
    instructions: tuple


def dispatch_worker(worker_process: WorkerProcess):
    # Runs inside the worker process; exceptions propagate back through the pool.
    return worker_process.function(worker_process.instructions)


@dataclass
class WorkerReturn:
    status: bool
    command: str
    stdout: str = ""
    stderr: str = ""
    error: Optional[str] = None

    def output_result(self, logger: Logger, success_regex: Optional[str] = None, success_message: Optional[str] = None):
        if self.status:
            result_level = Log.Level.INFO
            if success_regex and success_message:
                success_match = re.search(success_regex, self.command)
                success_string = success_match.group(1) if success_match else "unknown_target"
                logger.log(result_level, success_message.format(output=success_string))
            else:
                logger.log(result_level, f"SUCCESS: {self.command}")
        else:
            result_level = Log.Level.ERROR
            logger.log(result_level, f"FAILURE: {self.command}")
            if self.error:
                logger.log(result_level, f"Error: {self.error}")

            if self.stdout:
                logger.log(result_level, f"STDOUT:\n{self.stdout}")

            if self.stderr:
                logger.log(result_level, f"STDERR:\n{self.stderr}")


class MultiprocessWorker:
    def __init__(self, logger: Optional[Logger] = None, max_processes=4, spinner_set: Spinners = Spinners.BASIC):
        if logger is None:
            logger = Log(self.__class__.__name__, Log.Level.DEBUG).create_logger()
        self.logger = logger

        self.tasks: list[WorkerProcess] = []
        self.max_processes = max_processes
        self.spinner: Spinner = spinner_set.value
        self.manager = Manager()
        self.active_processes = self.manager.Value('i', 0)
        self.stop_display = self.manager.Event()
        self._process_lock = threading.Lock()

    def add_tasks(self, instructions: list[WorkerProcess]):
        self.logger.log(Log.Level.DEBUG, f"Adding {len(instructions)} tasks.")
        self.tasks.extend(instructions)

    def add_task(self, instruction: WorkerProcess):
        self.logger.log(Log.Level.DEBUG, f"Adding task with callable {instruction.function.__name__}")
        self.tasks.append(instruction)

    def _spinner_display(self, total_tasks, results):
        idx = 0
        list_length = len(self.spinner.char_list)

        # Stop when every task has reported back or when run() signals shutdown,
        # so the display thread cannot hang if a task never delivers a result.
        while len(results) < total_tasks and not self.stop_display.is_set():
            sys.stdout.write(f"\r{self.spinner.char_list[idx]} | Completed: {len(results)}/{total_tasks} | Time: {datetime.now().strftime('%H:%M:%S')}   ")
            sys.stdout.flush()
            idx = (idx + 1) % list_length
            time.sleep(self.spinner.delay)

        sys.stdout.write("\r")
        sys.stdout.flush()

    def run(self):
        self.logger.log(Log.Level.DEBUG, f"Starting with {len(self.tasks)} tasks and max {self.max_processes} processes.")
        results = []
        spinner_thread = threading.Thread(target=self._spinner_display, args=(len(self.tasks), results))
        spinner_thread.daemon = True
        spinner_thread.start()

        try:
            with Pool(self.max_processes) as pool:
                for wp in self.tasks:
                    pool.apply_async(
                        dispatch_worker,
                        args=(wp,),
                        callback=lambda res: results.append(res)
                    )
                pool.close()
                pool.join()
        except Exception:
            self.logger.exception("Unexpected error during multiprocessing run")
        finally:
            self.stop_display.set()
            spinner_thread.join()

        self.logger.log(Log.Level.DEBUG, "All multiprocessing tasks completed.")
        return results