Skip to content
This repository has been archived by the owner on Sep 19, 2022. It is now read-only.

Commit

Permalink
Parameterize output directory
Browse files — view the repository at this point in the history
  • Loading branch information
johnugeorge committed Dec 11, 2018
1 parent bf63af4 commit 0a24c1b
Showing 1 changed file with 4 additions and 2 deletions.
6 changes: 4 additions and 2 deletions in examples/katib/mnist_with_summary.py
Diff table columns: original file line number · diff line number · diff line change
Expand Up @@ -7,7 +7,6 @@
from torchvision import datasets, transforms
from torch.autograd import Variable
from tensorboardX import SummaryWriter
writer = SummaryWriter('runs')
# Training settings
parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
parser.add_argument('--batch-size', type=int, default=64, metavar='N',
Expand All @@ -26,6 +25,8 @@
help='random seed (default: 1)')
parser.add_argument('--log-interval', type=int, default=10, metavar='N',
help='how many batches to wait before logging training status')
parser.add_argument('--dir', default='logs', metavar='L',
help='directory where summary logs are stored')
args = parser.parse_args()
args.cuda = not args.no_cuda and torch.cuda.is_available()

Expand Down Expand Up @@ -72,7 +73,8 @@ def forward(self, x):
if args.cuda:
model.cuda()

print('Learning rate: {} Momentum: {}'.format(args.lr, args.momentum))
print('Learning rate: {} Momentum: {} Logs dir: {}'.format(args.lr, args.momentum, args.dir))
writer = SummaryWriter(args.dir)
optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum)

def train(epoch):
Expand Down

0 comments on commit 0a24c1b

Please sign in to comment.