convert: add ability to convert safetensors files (#1276)

* when loading a safetensors file, ignore the metadata header
* check for safetensors files first, and only use PyTorch versions when safetensors aren't available
This commit is contained in:
ubik2 2023-05-08 04:54:26 -07:00 committed by GitHub
parent 1f48b0abcf
commit 95078cc554
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23

View file

@@ -766,7 +766,7 @@ def lazy_load_safetensors_file(fp: IO[bytes], path: Path) -> ModelPlus:
             return UnquantizedTensor(np.frombuffer(buf, dtype=numpy_dtype).reshape(shape))
         description = f'safetensors begin={begin} end={end} type={data_type} path={path}'
         return LazyTensor(load, shape, data_type, description)
-    model = {name: convert(info) for (name, info) in header.items()}
+    model = {name: convert(info) for (name, info) in header.items() if name != '__metadata__'}
     return ModelPlus(model=model, paths=[path], format='safetensors', vocab=None)
@@ -1051,6 +1051,10 @@ def load_some_model(path: Path) -> ModelPlus:
    '''Load a model of any supported format.'''
    # Be extra-friendly and accept either a file or a directory:
    if path.is_dir():
-        globs = ["consolidated.00.pth", "pytorch_model-00001-of-*.bin", "*.pt"]
-        files = [file for glob in globs for file in path.glob(glob)]
+        # Check if it's a set of safetensors files first
+        files = list(path.glob("model-00001-of-*.safetensors"))
+        if not files:
+            # Try the PyTorch patterns too, with lower priority
+            globs = ["consolidated.00.pth", "pytorch_model-00001-of-*.bin", "*.pt"]
+            files = [file for glob in globs for file in path.glob(glob)]
        if not files: