Commit

update

AAriam committed Sep 21, 2024
1 parent 5ac7064 commit d4e7078

Showing 6 changed files with 168 additions and 101 deletions.
33 changes: 16 additions & 17 deletions pyproject.toml
@@ -17,28 +17,27 @@ namespaces = true
 # ----------------------------------------- Project Metadata -------------------------------------
 #
 [project]
-version = "0.0.0.dev269"
+version = "0.0.0.dev270"
 name = "ControlMan"
 dependencies = [
     "packaging >= 23.2, < 24",
     "jsonschema >= 4.23, < 5",
-    "trove-classifiers",
-    "readme-renderer[md]",
-    "pylinks",
-    "pycolorit",
-    "pybadger",
-    "ConventionalCommits",
-    "actionman",
-    "loggerman",
-    "pyserials",
-    "gittidy",
-    "FileEx",
-    "PkgData",
-    "PyShellMan",
-    "PySyntax",
+    "trove-classifiers >= 2024.9.12",
+    "PyLinks == 0.0.0.dev27",
+    "PyColorIT == 0.0.0.dev5",
+    "PyBadger == 0.0.0.dev8",
+    "ConventionalCommits == 0.0.0.dev3",
+    "ActionMan == 0.0.0.dev15",
+    "LoggerMan == 0.0.0.dev15",
+    "PySerials == 0.0.0.dev7",
+    "GitTidy == 0.0.0.dev7",
+    "FileEx == 0.0.0.dev1",
+    "PkgData == 0.0.0.dev3",
+    "PyShellMan == 0.0.0.dev2",
+    "PySyntax == 0.0.0.dev2",
     "referencing == 0.35.1",
     "jsonpath-ng == 1.6.1",
-    "ExceptionMan",
-    "mdit",
+    "ExceptionMan == 0.0.0.dev4",
+    "MDit == 0.0.0.dev3",
 ]
 requires-python = ">=3.10"
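
Note: the previously unpinned dependencies are replaced with exact `==` pins (plus a version floor for trove-classifiers), which makes dev builds reproducible. A minimal sketch of checking installed versions against a few of these pins with only the standard library (the subset of packages is chosen for illustration):

```python
from importlib.metadata import PackageNotFoundError, version

# A few of the exact pins from pyproject.toml above (illustrative subset).
PINS = {
    "PyLinks": "0.0.0.dev27",
    "PySerials": "0.0.0.dev7",
    "referencing": "0.35.1",
}

for name, expected in PINS.items():
    try:
        installed = version(name)  # queries the installed distribution's metadata
    except PackageNotFoundError:
        print(f"{name}: not installed")
        continue
    mark = "OK" if installed == expected else f"MISMATCH (installed {installed})"
    print(f"{name} == {expected}: {mark}")
```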
132 changes: 92 additions & 40 deletions src/controlman/cache_manager.py
@@ -4,6 +4,7 @@
 
 from loggerman import logger as _logger
 import pyserials as _ps
+import mdit as _mdit
 
 from controlman import exception as _exception, const as _const
 from controlman import data_validator as _data_validator
@@ -13,56 +14,89 @@ class CacheManager:
 
     _TIME_FORMAT = "%Y_%m_%d_%H_%M_%S"
 
-    @_logger.sectioner("Initialize Cache Manager")
     def __init__(
         self,
-        path_repo: _Path,
-        retention_hours: dict[str, float],
+        path_local_dir: _Path | str | None = None,
+        retention_hours: dict[str, float] | None = None,
     ):
-        self._path = path_repo / _const.FILEPATH_METADATA_CACHE
-        self._retention_hours = retention_hours
-        if not self._path.is_file():
-            _logger.info("Caching", f"No cache file found at '{self._path}'; initialized new cache.")
-            self._cache = {}
-        else:
-            try:
-                self._cache = _ps.read.yaml_from_file(path=self._path)
-            except _ps.exception.read.PySerialsReadException as e:
-                self._cache = {}
-                _logger.info(
-                    "Caching", f"API cache file at '{self._path}' is corrupted; initialized new cache."
-                )
-                _logger.debug("Cache Corruption Details", str(e))
-            try:
-                _data_validator.validate(
-                    data=self._cache,
-                    schema="cache",
-                )
-            except _exception.ControlManException as e:
-                self._cache = {}
-                _logger.info(
-                    "Caching", f"API cache file at '{self._path}' is invalid; initialized new cache."
-                )
-                _logger.debug("Cache Validation Details", str(e))
+
+        def log_msg_new_cache(reason: str | None = None, traceback: bool = False):
+            msg = _mdit.inline_container(
+                "The provided filepath ",
+                _mdit.element.code_span(str(self._path)),
+                f" for control center cache {reason}. ",
+                "Initialized a new cache.",
+            ) if reason else "No filepath provided for control center cache. Initialized a new cache."
+            log_content = [msg]
+            if traceback:
+                log_content.append(_logger.traceback())
+            _logger.warning(log_title, *log_content)
+            return
+
+        log_title = "Cache Initialization"
+
+        self._cache = {}
+        self._retention_hours = retention_hours or {}
+
+        if path_local_dir:
+            self._path = _Path(path_local_dir).resolve() / _const.DIRNAME_LOCAL_CACHE / _const.DIRNAME_LOCAL_REPODYNAMICS / _const.FILENAME_METADATA_CACHE
+            if not self._path.is_file():
+                log_msg_new_cache("does not exist")
+            else:
+                try:
+                    self._cache = _ps.read.yaml_from_file(path=self._path)
+                except _ps.exception.read.PySerialsReadException as e:
+                    log_msg_new_cache("is corrupted", traceback=True)
+                try:
+                    _data_validator.validate(
+                        data=self._cache,
+                        schema="cache",
+                    )
+                except _exception.ControlManException:
+                    log_msg_new_cache("is invalid", traceback=True)
+                else:
+                    _logger.success(
+                        log_title,
+                        _mdit.inline_container(
+                            "Loaded control center cache from ",
+                            _mdit.element.code_span(str(self._path)),
+                        )
+                    )
+        else:
+            self._path = None
+            log_msg_new_cache()
         return
 
     def get(self, typ: str, key: str):
-        log_title = f"Retrieve '{typ}.{key}' from API cache"
+        log_title = _mdit.inline_container(
+            "Cache Retrieval for ", _mdit.element.code_span(f"{typ}.{key}")
+        )
+        if typ not in self._retention_hours:
+            _logger.warning(
+                log_title,
+                _mdit.inline_container(
+                    "Retention hours not defined for cache type ",
+                    _mdit.element.code_span(typ),
+                    ". Skipped cache retrieval."
+                )
+            )
+            return
         item = self._cache.get(typ, {}).get(key)
         if not item:
-            _logger.info(log_title, "Item not found")
+            _logger.info(log_title, "Item not found.")
             return
         timestamp = item.get("timestamp")
         if timestamp and self._is_expired(typ, timestamp):
             _logger.info(
                 log_title,
-                f"Item expired; timestamp: {timestamp}, retention hours: {self._retention_hours}"
+                f"Item expired.\n- Timestamp: {timestamp}\n- Retention Hours: {self._retention_hours}"
             )
             return
-        _logger.info(log_title, f"Item found")
-        _logger.debug(log_title, str(item['data']))
+        _logger.info(
+            log_title,
+            "Item found.",
+            _mdit.element.code_block(_ps.write.to_yaml_string(item["data"]), language="yaml")
+        )
         return item["data"]
 
     def set(self, typ: str, key: str, value: dict | list | str | int | float | bool):
@@ -71,17 +105,35 @@ def set(self, typ: str, key: str, value: dict | list | str | int | float | bool)
             "data": value,
         }
         self._cache.setdefault(typ, {})[key] = new_item
-        _logger.info(f"Set API cache for '{key}'")
-        _logger.debug("Cache Data", str(new_item))
+        _logger.info(
+            _mdit.inline_container(
+                "Cache Set for ",
+                _mdit.element.code_span(f"{typ}.{key}")
+            ),
+            _mdit.element.code_block(_ps.write.to_yaml_string(value), language="yaml")
+        )
         return
 
     def save(self):
-        _ps.write.to_yaml_file(
-            data=self._cache,
-            path=self._path,
-            make_dirs=True,
-        )
-        _logger.debug("Save API cache file", self._path)
+        log_title = "Cache Save"
+        if self._path:
+            _ps.write.to_yaml_file(
+                data=self._cache,
+                path=self._path,
+                make_dirs=True,
+            )
+            _logger.success(
+                log_title,
+                _mdit.inline_container(
+                    "Saved control center cache to ",
+                    _mdit.element.code_span(str(self._path)),
+                )
+            )
+        else:
+            _logger.warning(
+                log_title,
+                "No filepath provided for control center cache. Skipped saving cache."
+            )
+        return
 
     def _is_expired(self, typ: str, timestamp: str) -> bool:
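
For context, a hedged usage sketch of the reworked `CacheManager` API as it reads after this change; the directory, cache type, and values below are made-up examples, not part of the commit:

```python
from pathlib import Path

from controlman.cache_manager import CacheManager

# Without a local directory, the manager warns and keeps the cache in memory only.
ephemeral = CacheManager()

# With a local directory, the cache file resolves to
# <path_local_dir>/cache/RepoDynamics/.metadata_cache.yaml (see const.py below).
cache = CacheManager(
    path_local_dir=Path("/tmp/example-repo/.local"),  # hypothetical path
    retention_hours={"api": 24.0},  # per-type retention; "api" is illustrative
)
cache.set("api", "user-info", {"name": "octocat"})
print(cache.get("api", "user-info"))   # -> {'name': 'octocat'} while unexpired
cache.get("other", "key")              # warns: no retention hours for "other"; returns None
cache.save()                           # writes YAML only when a filepath was resolved
```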
3 changes: 2 additions & 1 deletion src/controlman/center_manager.py
@@ -43,8 +43,9 @@ def __init__(
 
         self._path_root = self._git.repo_path
         self._hook_manager = _HookManager(dir_path=self._path_cc / const.DIRNAME_CC_HOOK)
+        local_dir_path = self._data_before.get("local.path")
         self._cache_manager: CacheManager = CacheManager(
-            path_repo=self._path_root,
+            path_local_dir=self._path_root / local_dir_path if local_dir_path else None,
             retention_hours=self._data_before.get("control.cache.retention_hours", {}),
         )
 
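
The new argument relies on `/` binding tighter than the conditional expression; a quick standalone check (values are illustrative):

```python
from pathlib import Path

path_root = Path("/repo")
local_dir_path = ".local"  # e.g. the value of the "local.path" data key

# Parsed as (path_root / local_dir_path) if local_dir_path else None
arg = path_root / local_dir_path if local_dir_path else None
print(arg)  # /repo/.local

local_dir_path = None
print(path_root / local_dir_path if local_dir_path else None)  # None
```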
6 changes: 5 additions & 1 deletion src/controlman/const.py
@@ -7,8 +7,12 @@
 # ControlMan Constants
 DIRPATH_CC_DEFAULT = ".control"
 
+DIRNAME_LOCAL_CACHE = "cache"
+DIRNAME_LOCAL_REPORT = "reports"
+DIRNAME_LOCAL_REPODYNAMICS = "RepoDynamics"
+
 FILEPATH_METADATA = ".github/.control/.metadata.json"
-FILEPATH_METADATA_CACHE = ".github/.control/.metadata_cache.yaml"
+FILENAME_METADATA_CACHE = ".metadata_cache.yaml"
 FILEPATH_LOCAL_CONFIG = ".github/.control/local_config.yaml"
 
 DIRNAME_CC_HOOK = "hook"
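
With the cache filename now decoupled from the repository path, the full cache path is assembled from these constants (as in cache_manager.py above); a quick sketch with a hypothetical base directory:

```python
from pathlib import Path

DIRNAME_LOCAL_CACHE = "cache"
DIRNAME_LOCAL_REPODYNAMICS = "RepoDynamics"
FILENAME_METADATA_CACHE = ".metadata_cache.yaml"

base = Path("/tmp/example-repo/.local").resolve()  # hypothetical path_local_dir
cache_file = base / DIRNAME_LOCAL_CACHE / DIRNAME_LOCAL_REPODYNAMICS / FILENAME_METADATA_CACHE
print(cache_file)  # /tmp/example-repo/.local/cache/RepoDynamics/.metadata_cache.yaml
```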
94 changes: 53 additions & 41 deletions src/controlman/data_gen/web.py
@@ -16,61 +16,73 @@ def __init__(self, data: _ps.NestedDict, source_path: _Path):
         return
 
     def generate(self):
-        self._process_website_toctrees()
+        self._process_frontmatter()
         return
 
-    @_logger.sectioner("Website Sections")
-    def _process_website_toctrees(self) -> None:
+    @_logger.sectioner("Website Pages")
+    def _process_frontmatter(self) -> None:
         pages = {}
-        blog_pages = {}
+        blog = {}
 
         for md_filepath in self._path.rglob("*.md", case_sensitive=False):
             if not md_filepath.is_file():
                 continue
-            rel_path = str(md_filepath.relative_to(self._path).with_suffix(""))
+            rel_path = md_filepath.relative_to(self._path)
+            dirhtml_path = str(rel_path.with_suffix("")).removesuffix("/index")
             text = md_filepath.read_text()
-            frontmatter = _mdit.parse.frontmatter(text)
+            frontmatter = _mdit.parse.frontmatter(text) or {}
             if "ccid" in frontmatter:
                 pages[_pl.string.to_slug(frontmatter["ccid"])] = {
                     "title": _mdit.parse.title(text),
-                    "path": rel_path,
-                    "url": f"{self._data['web.url.home']}/{rel_path}",
+                    "path": dirhtml_path,
+                    "url": f"{self._data['web.url.home']}/{dirhtml_path}",
                 }
             for key in ["category", "tags"]:
-                key_val = frontmatter.get(key)
-                if not key_val:
+                val = frontmatter.get(key)
+                if not val:
                     continue
-                if isinstance(key_val, str):
-                    key_val = [item.strip() for item in key_val.split(",")]
-                blog_pages.setdefault(rel_path, {}).setdefault(key, []).extend(key_val)
-        if "blog" not in pages:
-            self._data["web.page"] = pages
-            return
-        blog_path = _Path(pages["blog"]["path"]).parent
-        blog_path_str = str(blog_path)
-        blog_pages_final = {}
-        for potential_post_page_path, keywords_and_tags in blog_pages.items():
-            try:
-                _Path(potential_post_page_path).relative_to(blog_path)
-            except ValueError:
-                continue
-            for key in ["category", "tags"]:
-                for value in keywords_and_tags.get(key, []):
-                    value_slug = _pl.string.to_slug(value)
-                    key_singular = key.removesuffix('s')
-                    final_key = f"blog_{key_singular}_{value_slug}"
-                    if final_key in pages:
-                        raise _exception.data_gen.ControlManWebsiteError(
-                            "Duplicate page ID. "
-                            f"Generated ID '{final_key}' already exists "
-                            f"for page '{pages[final_key]['path']}'. "
-                            "Please do not use `ccid` values that start with 'blog_'."
-                        )
-                    blog_path_prefix = f"{blog_path_str}/" if blog_path_str != "." else ""
-                    blog_group_path = f"{blog_path_prefix}{key_singular}/{value_slug}"
-                    blog_pages_final[final_key] = {
-                        "title": value,
-                        "path": blog_group_path,
-                        "url": f"{self._data['web.url.home']}/{blog_group_path}",
-                    }
-        self._data["web.page"] = pages | blog_pages_final
+                if isinstance(val, str):
+                    val = [item.strip() for item in val.split(",")]
+                if not isinstance(val, list):
+                    _logger.warning(
+                        _mdit.inline_container(
+                            "Invalid webpage frontmatter: ",
+                            _mdit.element.code_span(str(rel_path)),
+                        ),
+                        _mdit.inline_container(
+                            "Invalid frontmatter value for ",
+                            _mdit.element.code_span(key),
+                            " :"),
+                        _mdit.element.code_block(
+                            _ps.write.to_yaml_string(val, end_of_file_newline=False),
+                            language="yaml",
+                        ),
+                    )
+                blog.setdefault(key, []).extend(val)
+        blog_path = self._data["web.extension.ablog.config.blog_path"] or "blog"
+        for key, values in blog.items():
+            for value in set(values):
+                value_slug = _pl.string.to_slug(value)
+                key_singular = key.removesuffix('s')
+                final_key = f"blog_{key_singular}_{value_slug}"
+                if final_key in pages:
+                    _logger.error(
+                        _mdit.inline_container(
+                            "Duplicate webpage ID ",
+                            _mdit.element.code_span(final_key)
+                        ),
+                        f"Generated ID '{final_key}' already exists "
+                        f"for page '{pages[final_key]['path']}'. "
+                        "Please do not use `ccid` values that start with 'blog_'."
+                    )
+                blog_group_path = f"{blog_path}/{key_singular}/{value_slug}"
+                pages[final_key] = {
+                    "title": value,
+                    "path": blog_group_path,
+                    "url": f"{self._data['web.url.home']}/{blog_group_path}",
+                }
+        self._data["web.page"] = pages
         return
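
To illustrate the rewritten frontmatter pass: given a post like the following (the file location, `ccid`, and site URL are invented for the example), the generator would emit one entry for the page itself plus one aggregation entry per category/tag value, rooted at the ablog `blog_path`:

```python
# Hypothetical source file blog/posts/hello.md under the website source path:
#
# ---
# ccid: my-post
# category: News
# tags: release, python
# ---
# # Hello World
#
# Assuming web.url.home == "https://example.org" and blog_path == "blog",
# the resulting web.page entries would look roughly like:
pages = {
    "my-post": {
        "title": "Hello World",
        "path": "blog/posts/hello",
        "url": "https://example.org/blog/posts/hello",
    },
    "blog_category_news": {
        "title": "News",
        "path": "blog/category/news",
        "url": "https://example.org/blog/category/news",
    },
    "blog_tag_release": {
        "title": "release",
        "path": "blog/tag/release",
        "url": "https://example.org/blog/tag/release",
    },
    "blog_tag_python": {
        "title": "python",
        "path": "blog/tag/python",
        "url": "https://example.org/blog/tag/python",
    },
}
```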
1 change: 0 additions & 1 deletion src/controlman/file_gen/python.py
@@ -2,7 +2,6 @@
 
 # Standard libraries
 from typing import Literal
-import re
 import textwrap
 from pathlib import Path as _Path
 
