Open-source libraries
Docs, repos, and a quickstart for FasterAI and FasterBench.
Quickstart
A minimal example that compresses a model with FasterAI and benchmarks it with FasterBench.
import torch
from fasterai.prune.all import *
from fasterbench.benchmark import *
from torchvision.models import resnet18
# 1) Load a baseline model
model = resnet18()
# 2) Compress in seconds: prune 50% of the weights, selected globally
#    with the large_final criterion
pruner = Pruner(model, 50, 'global', large_final)
pruner.prune_model()
# 3) Benchmark the compressed model on a dummy input
dummy = torch.randn(1, 3, 224, 224)  # example input with resnet18's expected shape
bench = benchmark(model, dummy)
# 4) Save compressed model
torch.save(model.state_dict(), "compressed-model.pth")
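To run the compressed model later without re-pruning, the saved weights need to be loaded back into a matching architecture. A minimal sketch, assuming the pruned model still matches the module structure of a plain resnet18; if the pruning removes structures (e.g., whole filters), a freshly built resnet18 will not match the saved state dict, and saving the whole model object (torch.save(model, ...)) is a common workaround.
import torch
from torchvision.models import resnet18
# Rebuild the architecture and load the compressed weights
# (assumes the state dict still matches a plain resnet18)
restored = resnet18()
restored.load_state_dict(torch.load("compressed-model.pth"))
restored.eval()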