Fix: add metadata to bf16 safetensors for loading using transformers

root 2025-03-06 14:25:47 +08:00
parent 592fd5daf8
commit be411d69f4


@@ -85,7 +85,7 @@ def main(fp8_path, bf16_path):
                 new_state_dict[weight_name] = weight
         new_safetensor_file = os.path.join(bf16_path, file_name)
-        save_file(new_state_dict, new_safetensor_file)
+        save_file(new_state_dict, new_safetensor_file, metadata={"format": "pt"})
         # Memory management: keep only the 2 most recently used files
         if len(loaded_files) > 2:
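
For context: safetensors files carry an optional metadata dictionary in their header, and the transformers loader inspects the "format" key when reading a checkpoint, so shards saved without a framework tag such as "pt" can be rejected at load time. Below is a minimal sketch of what the patched save_file call produces; the file name and toy tensor are illustrative only, not part of the conversion script.

import torch
from safetensors import safe_open
from safetensors.torch import save_file

# Toy bf16 state dict standing in for a converted shard (illustrative only).
state_dict = {"weight": torch.randn(4, 4, dtype=torch.bfloat16)}

# Same call shape as the patched script: the metadata tag marks the file as a
# PyTorch ("pt") checkpoint inside the safetensors header.
save_file(state_dict, "model-00001-of-00001.safetensors", metadata={"format": "pt"})

# Inspect the header to confirm the tag that transformers looks for on load.
with safe_open("model-00001-of-00001.safetensors", framework="pt") as f:
    print(f.metadata())  # expected: {'format': 'pt'}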