# token_uuid_slot.py
#
# Look up the per-special-token UUID stored in a gpt-oss metal `model.bin`
# and print the token's slot in the special-token UUID table together with
# that UUID.
#
# NOTE(review): this file was recovered from a mangled copy — a tag-stripper
# removed every `<...>` span, which destroyed the struct format strings, some
# dict keys, and a run of code between `struct.unpack("` and the output
# f-string.  The struct layouts and the parsing logic marked TODO below are
# reconstructed and MUST be verified against the gpt-oss metal model.bin
# writer before the output is trusted.

import struct
import sys
import uuid

# The mangled original also imported pathlib, tiktoken and
# openai_harmony.load_harmony_encoding; nothing in the recovered logic uses
# them, so they are dropped here.  Re-add if the verified logic needs them.

# struct format of the model-parameter header that sits between the model
# UUID and the Apple-GPU-layout UUID.  This script only uses its *size* (it
# skips over the header).  The original format string was lost in the
# mangling; "<7I" is a placeholder of plausible size.
# TODO(review): confirm field count/types against the model.bin writer.
FMT_MODEL = "<7I"

# Special-token string -> o200k_harmony token id.
# Fixes relative to the recovered original:
#   * "<|reversedNNNNNN|>" keys were typos for "<|reserved_NNNNNN|>".
#   * The original mapped BOTH "<|message|>" and "<|reversed200008|>" to
#     200008 — a duplicate id; 200008 is <|message|>, so the bogus
#     reserved entry is dropped.
#   * The 199998/199999 keys were eaten by the mangling; reconstructed as
#     the standard o200k_harmony <|startoftext|>/<|endoftext|> names.
SPECIAL_TOKENS = {
    "<|startoftext|>": 199998,
    "<|endoftext|>": 199999,
    "<|untrusted|>": 200000,
    "<|endofuntrusted|>": 200001,
    "<|return|>": 200002,
    "<|constrain|>": 200003,
    "<|reserved_200004|>": 200004,
    "<|channel|>": 200005,
    "<|start|>": 200006,
    "<|end|>": 200007,
    "<|message|>": 200008,
    "<|reserved_200009|>": 200009,
    "<|reserved_200010|>": 200010,
    "<|reserved_200011|>": 200011,
    "<|call|>": 200012,
    "<|refusal|>": 200013,
}


def header_and_table_off(f):
    """Skip the model.bin header and locate the special-token UUID table.

    Expects ``f`` to be a binary file object positioned at offset 0.

    Returns:
        tuple: ``(tok_uuid, ns, nt, table_off)`` — the tokenizer UUID, the
        number of special tokens, the number of text tokens, and the file
        offset of the table of 16-byte per-special-token UUIDs.
    """
    f.read(16)  # magic
    f.read(16)  # model uuid
    f.read(struct.calcsize(FMT_MODEL))  # model parameter header (skipped)
    f.read(16)  # apple uuid (GPU layout)
    tok_uuid = uuid.UUID(bytes=f.read(16))
    # Tokenizer header.  Reconstructed as four little-endian uint32s:
    # num_special_tokens, num_text_tokens, regex_size, tokens_size — the
    # original format string was lost.  TODO(review): confirm order/widths.
    ns, nt, rs, ts = struct.unpack("<4I", f.read(16))
    f.read(rs)  # pre-tokenization regex (skipped)
    # The special-token UUID table (ns entries of 16 bytes each) starts at
    # the current position.
    return tok_uuid, ns, nt, f.tell()


def show(path, token):
    """Print ``token``'s slot index and stored UUID from the model at ``path``.

    Raises:
        KeyError: if ``token`` is not a known special token.
        ValueError: if the token's computed slot is outside the UUID table.
    """
    tid = SPECIAL_TOKENS[token]
    with open(path, "rb") as f:
        tok_uuid, ns, nt, table_off = header_and_table_off(f)
        # Special tokens are numbered directly after the nt text tokens, so
        # the slot in the UUID table is the id's offset past the text tokens.
        # TODO(review): confirm this numbering against the writer.
        slot = tid - nt
        if not 0 <= slot < ns:
            raise ValueError(f"token id {tid} -> slot {slot} outside table of {ns} entries")
        f.seek(table_off + slot * 16)
        u = uuid.UUID(bytes=f.read(16))
    # Output shape reconstructed around the surviving fragment
    # `-> slot={slot}, uuid={u}`; the original prefix text was lost.
    sys.stdout.write(f"{token} (id={tid}, tokenizer={tok_uuid}) -> slot={slot}, uuid={u}\n")


if __name__ == "__main__":
    # usage: python token_uuid_slot.py <model.bin> "<|channel|>"
    show(sys.argv[1], sys.argv[2])

# python tests/token_uuid_slot.py /Volumes/long990max/gpustack_data/openai/gpt-oss-20b/metal/model.bin "<|channel|>"
# python tests/token_uuid_slot.py /Volumes/long990max/project/openharmony-mlx/model.bin "<|channel|>"
# python tests/token_uuid_slot.py /Volumes/long990max/gpustack_data/openai/gpt-oss-20b/metal/model.bin "<|message|>"
# python tests/token_uuid_slot.py /Volumes/long990max/project/openharmony-mlx/model.bin "<|message|>"
# python tests/token_uuid_slot.py /Volumes/long990max/gpustack_data/openai/gpt-oss-20b/metal/model.bin "<|return|>"
# python tests/token_uuid_slot.py /Volumes/long990max/project/openharmony-mlx/model.bin "<|return|>"