
refactor code and make PS faster
Mostafa Elhoushi committed Feb 17, 2020
1 parent 7ac9846 commit 4040aec
Showing 37 changed files with 2,154 additions and 952 deletions.
16 changes: 13 additions & 3 deletions pytorch/cifar10.py
@@ -26,10 +26,20 @@
from torchsummary import summary
import optim

-from convert_to_shift import convert_to_shift, round_shift_weights, count_layer_type
+from deepshift.convert import convert_to_shift, round_shift_weights, count_layer_type
+from unoptimized.convert import convert_to_unoptimized

import cifar10_models as models

+'''
+Unfortunately, none of the PyTorch repositories with ResNets on CIFAR10 provides an
+implementation as described in the original paper. If you just use torchvision's
+models on CIFAR10, you get a model that differs in the number of layers and parameters.
+This is unacceptable if you want to directly compare ResNets on CIFAR10 with the
+original paper. The purpose of resnet_cifar10 (obtained from https://github.com/akamaster/pytorch_resnet_cifar10)
+is to provide a valid PyTorch implementation of ResNets for CIFAR10 as described in the original paper.
+'''

model_names = sorted(name for name in models.__dict__
if name.islower() and not name.startswith("__")
and callable(models.__dict__[name]))
@@ -46,8 +56,8 @@
                    help='path to file to load its weights (default: none)')
parser.add_argument('-s', '--shift-depth', type=int, default=0,
                    help='how many layers to convert to shift')
-parser.add_argument('-st', '--shift-type', default='Q', choices=['Q', 'PS'],
-                    help='type of DeepShift method for training and representing weights (default: Q)')
+parser.add_argument('-st', '--shift-type', default='PS', choices=['Q', 'PS'],
+                    help='type of DeepShift method for training and representing weights (default: PS)')
parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
                    help='number of data loading workers (default: 4)')
parser.add_argument('--epochs', default=200, type=int, metavar='N',
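For context on what the relocated converters do: DeepShift represents weights as signed powers of two so that multiplications can be replaced by bit shifts. The snippet below is illustrative only and is not the repository's round_shift_weights implementation; the helper name round_to_power_of_two is hypothetical, introduced here just to show the idea behind rounding weights to the shift representation.

# Illustrative only: round an ordinary weight tensor to signed powers of two,
# the representation used by shift ('Q'/'PS') layers. This is a sketch of the
# idea, not the repository's round_shift_weights implementation.
import torch

def round_to_power_of_two(w, eps=1e-12):
    # Keep the sign; round log2 of the magnitude to the nearest integer exponent.
    sign = torch.sign(w)
    shift = torch.round(torch.log2(w.abs() + eps))
    return sign * torch.pow(2.0, shift)

w = torch.randn(4, 4)
w_shift = round_to_power_of_two(w)
print(w_shift)  # every nonzero entry is +/- 2^k for some integer k

With weights in this form, multiplying by a weight reduces to a sign flip and a shift by k bits, which is what makes the shift layers cheap to evaluate.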
5 changes: 4 additions & 1 deletion pytorch/cifar10_models/densenet.py
Expand Up @@ -4,7 +4,7 @@
import torch.nn.functional as F
from torch.autograd import Variable

-__all__ = ['densenet121', 'densenet169', 'densenet201', 'densenet264']
+__all__ = ['densenet40', 'densenet121', 'densenet169', 'densenet201', 'densenet264']

"""
densenet with basic block.
@@ -129,6 +129,9 @@ def forward(self, x):

return x

+def densenet40():
+    return densenet(depth=40)

def densenet121():
    return densenet(depth=121)

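A quick usage sketch for the new constructor follows; the import path is assumed from the file location above (pytorch/cifar10_models/densenet.py), and the parameter-count print is just a sanity check, not part of the repository's code.

# Usage sketch for the newly added densenet40 constructor (import path assumed).
from cifar10_models.densenet import densenet40

model = densenet40()  # wraps densenet(depth=40), as shown in the diff above
num_params = sum(p.numel() for p in model.parameters())
print("densenet40 parameters:", num_params)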
5 changes: 0 additions & 5 deletions pytorch/cpu_kernal/setup.py

This file was deleted.

94 changes: 0 additions & 94 deletions pytorch/cuda_kernel/shift_cuda.cpp

This file was deleted.

