update @ Sun Dec 15 03:35:24 EET 2024

Signed-off-by: Ari Archer <ari@ari.lt>
This commit is contained in:
Arija A. 2024-12-15 03:35:24 +02:00
parent c85b02469c
commit fb78c01bfe
WARNING! Although there is a key with this ID in the database it does not verify this commit! This commit is SUSPICIOUS.
GPG key ID: A50D5B4B599AF8A2
3 changed files with 103 additions and 22 deletions

1
.gitignore vendored
View file

@ -9,6 +9,7 @@ venv/
/blog_json_hash.txt
/manifest.json
/recents_json_hash.txt
/media/media_json_hash.txt
/recents.json
/rss.xml
/sitemap.xml

1
media/media.json Normal file
View file

@ -0,0 +1 @@
{}

View file

@ -551,6 +551,20 @@ def select_posts(posts: dict[str, dict[str, typing.Any]]) -> tuple[str, ...]:
)
def select_medias() -> tuple[str, ...]:
    """Interactively pick media entries; return the chosen media IDs.

    Builds a human-readable option line per entry in ``MEDIA_INDEX``,
    hands them to ``select_multi``, then strips each selection back
    down to the ID (the text before the first ``|``).
    """

    options: tuple[str, ...] = tuple(
        f"{mdx} | {mdy['type']}, {mdy['alt']} for {mdy['purpose']} | {mdy['title']}, {mdy['credit']}"
        for mdx, mdy in MEDIA_INDEX.items()
    )

    return tuple(
        choice.split("|", maxsplit=1)[0].strip() for choice in select_multi(options)
    )
if NCI:
try:
import readline
@ -765,7 +779,7 @@ def parse_inline_media_embed(
{"type": "linebreak"},
{
"type": "emphasis",
"raw": f"{mdx['alt']} | \"{mdx['title']}\" by {mdx['credit']} ({mdx['license']}). Purpose: {html_escape(mdx['purpose'])}. Uploaded on {datetime.datetime.utcfromtimestamp(mdx['created']).strftime('%a, %d %b %Y %H:%M:%S GMT')}. ",
"raw": f"{mdx['alt']} | \"{mdx['title']}\" by {mdx['credit']} ({mdx['license']}). Purpose: {html_escape(mdx['purpose'])}. Uploaded on {datetime.datetime.utcfromtimestamp(mdx['uploaded']).strftime('%a, %d %b %Y %H:%M:%S GMT')}. ",
},
{
"type": "link",
@ -952,7 +966,7 @@ def new(config: dict[str, typing.Any]) -> int:
"description": description.strip(),
"content": content,
"keywords": keywords,
"created": datetime.datetime.utcnow().timestamp(),
"uploaded": datetime.datetime.utcnow().timestamp(),
}
return OK
@ -1486,7 +1500,10 @@ def apis(config: dict[str, typing.Any]) -> int:
)
lnew(f"generated {recents.name!r}")
for api in recents.name, CONFIG_FILE:
for api in recents.name, CONFIG_FILE, "media/media.json":
if not os.path.isfile(api):
continue
with open(api, "rb") as content:
h: str = hashlib.sha256(content.read()).hexdigest()
@ -1509,11 +1526,12 @@ def clean(config: dict[str, typing.Any]) -> int:
except IsADirectoryError:
shutil.rmtree(file)
for pattern in (
for pattern in {
config["posts-dir"],
"index.html",
f"{config['assets-dir']}/*.min.*",
"blog_json_hash.txt",
"media/media_json_hash.txt",
"manifest.json",
f"{config['assets-dir']}/fonts/*.min.*",
"recents_json_hash.txt",
@ -1522,7 +1540,7 @@ def clean(config: dict[str, typing.Any]) -> int:
"robots.txt",
"sitemap.xml",
"stats",
):
}:
if os.path.exists(pattern):
remove(pattern)
else:
@ -1623,6 +1641,8 @@ def blog(config: dict[str, typing.Any]) -> int:
def media(config: dict[str, typing.Any]) -> int:
"""add media"""
assert config is config, "Unused"
path: str = iinput("media path")
path = os.path.expanduser(path)
@ -1655,15 +1675,9 @@ def media(config: dict[str, typing.Any]) -> int:
os.makedirs("media", exist_ok=True)
if os.path.exists("media/media.json"):
with open("media/media.json", "r") as fp:
index: dict[str, dict[str, typing.Any]] = json.load(fp)
else:
index = {}
# Check if it exists
if hash_hex in index:
if hash_hex in MEDIA_INDEX:
return err(f"media pointing to {path!r} already exists")
# Process stuff
@ -1693,7 +1707,7 @@ def media(config: dict[str, typing.Any]) -> int:
optimize=True,
)
index[hash_hex] = {
MEDIA_INDEX[hash_hex] = {
"type": "image",
"width": width,
"height": height,
@ -1702,14 +1716,14 @@ def media(config: dict[str, typing.Any]) -> int:
elif mime.startswith("audio/"):
shutil.copy(path, fpath)
index[hash_hex] = {
MEDIA_INDEX[hash_hex] = {
"type": "audio",
"alt": iinput("alt text"),
}
else:
return err(f"unsupported MIME: {mime!r}")
index[hash_hex].update(
MEDIA_INDEX[hash_hex].update(
{
"purpose": purpose,
"title": title,
@ -1723,16 +1737,70 @@ def media(config: dict[str, typing.Any]) -> int:
lnew(f"media {hash_hex} created")
# Update media.json
return OK
with open("media/media.json", "w") as fp:
json.dump(index, fp, indent=config["indent"])
with open("media/media_json_hash.txt", "w") as fp:
with open("media/media.json", "rb") as fk:
fp.write(hashlib.sha256(fk.read()).hexdigest())
@cmds.new
def lsmedia(config: dict[str, typing.Any]) -> int:
    """list media"""
    # NOTE(review): this lnew() call looks out of place here — it reads like the
    # tail of media() (after writing media_json_hash.txt) that the diff view
    # interleaved into this hunk; confirm against the real file.
    lnew("Updated media.json and media_json_hash.txt")
    assert config is config, "Unused"
    # Print every media ID followed by its metadata as indented key = value pairs.
    for mdx, mdy in MEDIA_INDEX.items():
        print(mdx)
        for k, v in mdy.items():
            print(" ", k, "=", v)
    return OK
@cmds.new
def rmmedia(config: dict[str, typing.Any]) -> int:
    """remove media"""
    # Fix: assert message typo ("Unusued" -> "Unused"), matching sibling commands.
    assert config is config, "Unused"

    # Let the user pick IDs interactively, then delete both the on-disk
    # file (media/<id>.<ext>) and the in-memory index entry for each.
    for mdx in select_medias():
        imp(f"Removing media {mdx}")
        os.remove(f"media/{mdx}.{MEDIA_INDEX[mdx]['ext']}")
        del MEDIA_INDEX[mdx]

    return OK
@cmds.new
def purgemedia(config: dict[str, typing.Any]) -> int:
    """purge unused or unindexed media"""

    posts: typing.Any = config["posts"].values()

    # Gather every indexed media ID that no post content embeds via <@id>.
    # Deletion is deferred: a dict cannot change size during iteration.
    unused: set[str] = {
        mdx
        for mdx in MEDIA_INDEX
        if not any(f"<@{mdx}>" in post["content"] for post in posts)
    }

    for mdy in unused:
        log(f"Unindexing unused media {mdy}")
        del MEDIA_INDEX[mdy]

    # Second pass: remove on-disk files whose ID is no longer indexed.
    for file in os.listdir("media"):
        if file == "media.json":  # Ignore index file
            continue

        pid: str = os.path.splitext(os.path.basename(file))[0]

        if pid not in MEDIA_INDEX:
            imp(f"Removing unindexed media {pid}")
            os.remove(f"media/{file}")

    return OK
@ -1761,6 +1829,11 @@ def main() -> int:
MEDIA_INDEX.update(json.load(fp))
log("Loaded the media index (media/media.json)")
else:
os.makedirs("media", exist_ok=True)
with open("media/media.json", "w") as fp:
fp.write("{}")
sort(cfg)
@ -1792,6 +1865,12 @@ def main() -> int:
log(f"dumping config to {config.name!r}")
json.dump(cfg, config, indent=cfg["indent"] if NCI else None)
# Update media.json
with open("media/media.json", "w") as fp:
log("dumping media.json")
json.dump(MEDIA_INDEX, fp, indent=cfg["indent"])
log(f"goodbye world, return {code}, total {ctimer() - main_t} s")
return code