🐛 version 0.51.1
recursive dump Media with save_dir
RF-Tar-Railt committed Aug 6, 2024
1 parent d09416c commit 97e4788
Showing 5 changed files with 81 additions and 40 deletions.
2 changes: 1 addition & 1 deletion src/nonebot_plugin_alconna/__init__.py
@@ -137,7 +137,7 @@
from .uniseg import SupportAdapterModule as SupportAdapterModule
from .extension import add_global_extension as add_global_extension

__version__ = "0.51.0"
__version__ = "0.51.1"

__plugin_meta__ = PluginMetadata(
name="Alconna 插件",
2 changes: 1 addition & 1 deletion src/nonebot_plugin_alconna/uniseg/__init__.py
@@ -58,7 +58,7 @@
from .constraint import SupportAdapterModule as SupportAdapterModule
from .adapters import BUILDER_MAPPING, FETCHER_MAPPING, EXPORTER_MAPPING

__version__ = "0.51.0"
__version__ = "0.51.1"

__plugin_meta__ = PluginMetadata(
name="Universal Segment 插件",
18 changes: 7 additions & 11 deletions src/nonebot_plugin_alconna/uniseg/message.py
@@ -32,7 +32,6 @@
Emoji,
Hyper,
Image,
Media,
Reply,
Video,
Voice,
@@ -1409,33 +1408,30 @@ async def finish(
raise FinishedException

@overload
def dump(self, media_save_dir: Union[str, Path, None] = None) -> list[dict]: ...
def dump(self, media_save_dir: Optional[Union[str, Path, bool]] = None) -> list[dict]: ...

@overload
def dump(self, media_save_dir: Union[str, Path, None] = None, json: Literal[True] = True) -> str: ...
def dump(self, media_save_dir: Optional[Union[str, Path, bool]] = None, json: Literal[True] = True) -> str: ...

def dump(
self, media_save_dir: Union[str, Path, None] = None, json: bool = False
self, media_save_dir: Optional[Union[str, Path, bool]] = None, json: bool = False
) -> Union[str, list[dict[str, Any]]]:
"""将消息序列化为 JSON 格式
注意:
若 media_save_dir 为 False,则不会保存媒体文件。
若 media_save_dir 为 True,则会将文件数据转为 base64 编码。
若不指定 media_save_dir,则会尝试导入 `nonebot_plugin_localstore` 并使用其提供的路径。
否则,将会尝试使用当前工作目录。
Args:
media_save_dir (Union[str, Path, None], optional): 媒体文件保存路径. Defaults to None.
media_save_dir (Union[str, Path, bool, None], optional): 媒体文件保存路径. Defaults to None.
json (bool, optional): 是否返回 JSON 字符串. Defaults to False.
Returns:
Union[str, list[dict]]: 序列化后的消息
"""
result = []
for seg in self:
if isinstance(seg, Media):
result.append(seg.dump(media_save_dir=media_save_dir))
else:
result.append(seg.dump())
result = [seg.dump(media_save_dir=media_save_dir) for seg in self]
return dumps(result, ensure_ascii=False) if json else result

@classmethod
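A minimal usage sketch of the new message-level signature (not part of the commit; based on the docstring above and the tests added further down):

# Illustrative sketch: dump() now forwards media_save_dir to every segment.
from nonebot_plugin_alconna import UniMessage

msg = UniMessage.image(raw=b"123", mimetype="image/jpeg").text("hello")

msg.dump(media_save_dir=True)
# -> [{"type": "image", "raw": "MTIz", "mimetype": "image/jpeg"}, {"type": "text", "text": "hello"}]

msg.dump(media_save_dir=False)
# -> [{"type": "image", "raw": b"123", "mimetype": "image/jpeg"}, {"type": "text", "text": "hello"}]

data = msg.dump()           # default: media written to localstore's data dir (or ./.data/media); a "path" key replaces "raw"
text = msg.dump(json=True)  # same, but returned as a JSON string

restored = UniMessage.load(msg.dump(media_save_dir=True))  # base64 "raw" is decoded back to bytes on load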
85 changes: 59 additions & 26 deletions src/nonebot_plugin_alconna/uniseg/segment.py
@@ -2,6 +2,7 @@

import re
import json
import base64
import hashlib
import importlib
import contextlib
@@ -114,11 +115,32 @@ def _validate(cls, value) -> Self:
return value
raise ValueError(f"Type {type(value)} can not be converted to {cls}")

def dump(self) -> dict:
def dump(self, *, media_save_dir: Optional[Union[str, Path, bool]] = None) -> dict:
"""将对象转为 dict 数据
注意:
若 media_save_dir 为 False,则不会保存媒体文件。
若 media_save_dir 为 True,则会将文件数据转为 base64 编码。
若不指定 media_save_dir,则会尝试导入 `nonebot_plugin_localstore` 并使用其提供的路径。
否则,将会尝试使用当前工作目录。
"""
data = {f.name: getattr(self, f.name) for f in fields(self) if f.name not in ("origin", "_children")}
data = {"type": self.type, **{k: v for k, v in data.items() if v is not None}}
if isinstance(self, Media):
if self.name == self.__default_name__:
data.pop("name", None)
if self.url or self.path or not self.raw:
data.pop("raw", None)
data.pop("mimetype", None)
elif media_save_dir is True:
data["raw"] = base64.b64encode(self.raw_bytes).decode()
elif media_save_dir is not False:
path = self.save(media_save_dir=media_save_dir)
del data["raw"]
del data["mimetype"]
data["path"] = str(path.resolve().as_posix())
if self._children:
data["children"] = [child.dump() for child in self._children]
return {"type": self.type, **{k: v for k, v in data.items() if v is not None}}
data["children"] = [child.dump(media_save_dir=media_save_dir) for child in self._children]
return data
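
A hedged sketch of the per-segment branches above (the "media" directory name is illustrative; the result dicts mirror the new test cases at the bottom of this commit):

# Illustrative per-segment sketch (not part of the commit).
from nonebot_plugin_alconna import Image

img = Image(raw=b"123", mimetype="image/jpeg")

img.dump(media_save_dir=True)     # -> {"type": "image", "raw": "MTIz", "mimetype": "image/jpeg"}
img.dump(media_save_dir=False)    # -> {"type": "image", "raw": b"123", "mimetype": "image/jpeg"}
img.dump(media_save_dir="media")  # writes the file, drops raw/mimetype, adds a "path" key with the saved file's posix path

# Children receive the same media_save_dir, so nested Media is dumped recursively:
Image(url="https://example.com/1.jpg")(Image(raw=b"123")).dump(media_save_dir=True)
# -> {"type": "image", "url": "https://example.com/1.jpg", "children": [{"type": "image", "raw": "MTIz"}]}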

@classmethod
def load(cls, data: dict) -> Self:
@@ -512,7 +534,7 @@ def rstrip(self, chars: Optional[str] = None) -> "Text":
def strip(self, chars: str = " ") -> "Text":
return self.lstrip(chars).rstrip(chars)

def dump(self) -> dict:
def dump(self, **kwargs) -> dict:
data: dict = {"type": "text", "text": self.text}
if self.styles:
data["styles"] = {":".join(map(str, k)): v for k, v in self.styles.items()}
@@ -592,15 +614,18 @@ def raw_bytes(self) -> bytes:
self.name = f"{info.types[0]}.{info.extensions[0]}"
return raw

def dump(self, *, media_save_dir: Optional[Union[str, Path]] = None) -> dict:
data = super().dump()
if self.__is_default_name():
data.pop("name", None)
if self.url or self.path or not self.raw:
data.pop("raw", None)
data.pop("mimetype", None)
return {k: v for k, v in data.items() if v is not None}
if media_save_dir:
@classmethod
def load(cls, data: dict) -> Self:
if children := data.get("children", []):
children = [get_segment_class(child["type"]).load(child) for child in children]
if "raw" in data and isinstance(data["raw"], str):
data["raw"] = base64.b64decode(data["raw"])
return cls(**{k: v for k, v in data.items() if k not in ("type", "children")})(*children) # type: ignore

def save(self, media_save_dir: Optional[Union[str, Path]] = None) -> Path:
if not self.raw:
raise ValueError
if isinstance(media_save_dir, (str, Path)):
dir_ = Path(media_save_dir)
else:
try:
@@ -612,8 +637,6 @@ def dump(self, *, media_save_dir: Optional[Union[str, Path]] = None) -> dict:
get_data_dir = None # noqa
dir_ = Path.cwd() / ".data" / "media"
raw = self.raw.getvalue() if isinstance(self.raw, BytesIO) else self.raw
del data["raw"]
del data["mimetype"]
header = raw[:128]
info = fleep.get(header)
ext = info.extensions[0] if info.extensions else "bin"
@@ -622,8 +645,7 @@ def dump(self, *, media_save_dir: Optional[Union[str, Path]] = None) -> dict:
path.parent.mkdir(parents=True, exist_ok=True)
with path.open("wb+") as f:
f.write(raw)
data["path"] = str(path.resolve().as_posix())
return {k: v for k, v in data.items() if v is not None}
return path.resolve()
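
A brief sketch of the extracted save() helper (not part of the commit; the "media" directory is hypothetical):

# Illustrative sketch of Media.save().
from nonebot_plugin_alconna import Image

img = Image(raw=b"123", mimetype="image/jpeg")

p = img.save(media_save_dir="media")  # writes the bytes under ./media and returns the resolved Path
p = img.save()                        # falls back to nonebot_plugin_localstore's data dir, else ./.data/media

Image(url="https://example.com/1.jpg").save()  # no raw data to write: raises ValueError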


@dataclass
@@ -695,8 +717,11 @@ def __init__(
if not hasattr(self, "_children"):
self._children = []

def dump(self) -> dict:
return {"type": self.type, "id": self.id}
def dump(self, *, media_save_dir: Optional[Union[str, Path, bool]] = None) -> dict:
data = super().dump(media_save_dir=media_save_dir)
data["id"] = self.id
data.pop("msg", None)
return data


@dataclass
@@ -708,7 +733,7 @@ class RefNode:
context: Optional[str] = None
"""可能的群聊id"""

def dump(self):
def dump(self, **kwargs):
return {"type": "ref", "id": self.id, "context": self.context}

@classmethod
@@ -731,12 +756,16 @@ class CustomNode:
context: Optional[str] = None
"""可能的群聊id"""

def dump(self):
def dump(self, *, media_save_dir: Optional[Union[str, Path, bool]] = None):
return {
"type": "custom",
"uid": self.uid,
"name": self.name,
"content": self.content if isinstance(self.content, str) else [seg.dump() for seg in self.content],
"content": (
self.content
if isinstance(self.content, str)
else [seg.dump(media_save_dir=media_save_dir) for seg in self.content]
),
"time": self.time.timestamp(),
"context": self.context,
}
@@ -773,8 +802,12 @@ def __call__(self, *segments: Union[Segment, RefNode, CustomNode]) -> Self:
self._children.extend(segments) # type: ignore
return self

def dump(self) -> dict:
return {"type": self.type, "id": self.id, "nodes": [node.dump() for node in self._children]}
def dump(self, *, media_save_dir: Optional[Union[str, Path, bool]] = None) -> dict:
return {
"type": self.type,
"id": self.id,
"nodes": [node.dump(media_save_dir=media_save_dir) for node in self._children],
}
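
A hedged sketch of the recursive behaviour this commit targets: media inside forward-message nodes now receives the same media_save_dir, and Reference.dump forwards it to every node. The CustomNode import path and constructor arguments below are assumptions, not confirmed by this diff:

# Hypothetical sketch; constructor signature and import path are assumed.
from datetime import datetime
from nonebot_plugin_alconna import Image
from nonebot_plugin_alconna.uniseg import CustomNode

node = CustomNode(uid="123", name="Alice", time=datetime.now(), content=[Image(raw=b"123")])
node.dump(media_save_dir=True)
# -> {"type": "custom", "uid": "123", "name": "Alice",
#     "content": [{"type": "image", "raw": "MTIz"}], "time": <timestamp>, "context": None}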

@classmethod
def load(cls, data: dict):
@@ -891,7 +924,7 @@ class Other(Segment):
def __str__(self):
return f"[{self.origin.type}]"

def dump(self):
def dump(self, **kwargs):
return {
"type": "other",
"origin": asdict(self.origin),
@@ -940,7 +973,7 @@ def tp(self):

return UniMessage.template(lang.require(self.item.scope, self.item.type))

def dump(self):
def dump(self, **kwargs):
return {
"type": "i18n",
"scope": self.item.scope,
14 changes: 13 additions & 1 deletion tests/test_uniseg.py
@@ -73,7 +73,7 @@ def test_unimsg():


def test_persistence():
from nonebot_plugin_alconna import UniMessage
from nonebot_plugin_alconna import Image, UniMessage

msg = UniMessage.at("123").at_channel("456").image(url="https://example.com/1.jpg").text("hello")
assert msg.dump() == [
@@ -97,6 +97,18 @@ def test_persistence():
]
assert UniMessage.load(msg1) == UniMessage.at("456").text("world")

msg2 = UniMessage.image(raw=b"123", mimetype="image/jpeg")
assert msg2.dump(media_save_dir=True) == [{"type": "image", "raw": "MTIz", "mimetype": "image/jpeg"}]
assert msg2.dump(media_save_dir=False) == [{"type": "image", "raw": b"123", "mimetype": "image/jpeg"}]

msg3 = [{"type": "image", "raw": "MTIz", "mimetype": "image/jpeg"}]
assert UniMessage.load(msg3) == msg2

msg4 = UniMessage(Image(url="https://example.com/1.jpg")(Image(raw=b"123")))
assert msg4.dump(media_save_dir=True) == [
{"type": "image", "url": "https://example.com/1.jpg", "children": [{"type": "image", "raw": "MTIz"}]},
]


@pytest.mark.asyncio()
async def test_fallback(app: App):
Expand Down
