#!/usr/bin/env python
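"""Run federated learning experiments with FedAvg, pFedMe, or Per-FedAvg.

Builds the requested model, trains it with the chosen server algorithm for
`num_global_iters` rounds, repeats the experiment `times` times, and averages
the logged results across runs.
"""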
import argparse
import importlib
import os
import random

import h5py
import matplotlib.pyplot as plt
import numpy as np
import torch

from FLAlgorithms.servers.serveravg import FedAvg
from FLAlgorithms.servers.serverpFedMe import pFedMe
from FLAlgorithms.servers.serverperavg import PerAvg
from FLAlgorithms.trainmodel.models import *
from utils.plot_utils import *

torch.manual_seed(0)
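
# Models are handled as (network_instance, name_string) tuples: the instance is
# what gets trained (hence model[0].cuda() below), and the name string travels
# with it, presumably so the servers can tag saved results with the architecture.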
def main(dataset, algorithm, model, batch_size, learning_rate, beta, lamda,
         num_glob_iters, local_epochs, optimizer, numusers, K,
         personal_learning_rate, times):
    model_name = model
    for i in range(times):
        print("------------- Running time: {} -------------".format(i))

        # Generate a fresh model at the start of every run so runs stay independent
        if model_name == "mclr":
            if dataset == "Mnist":
                model = Mclr_Logistic(), model_name
            else:
                model = Mclr_Logistic(60, 10), model_name
        elif model_name == "cnn":
            if dataset == "Mnist":
                model = Net(), model_name
            elif dataset == "Cifar10":
                model = CifarNet(), model_name
        elif model_name == "dnn":
            if dataset == "Mnist":
                model = DNN(), model_name
            else:
                model = DNN(60, 20, 10), model_name

        if torch.cuda.is_available():
            model[0].cuda()

        # Select algorithm
        if algorithm == "FedAvg":
            server = FedAvg(dataset, algorithm, model, batch_size, learning_rate, beta,
                            lamda, num_glob_iters, local_epochs, optimizer, numusers, i)
        elif algorithm == "pFedMe":
            server = pFedMe(dataset, algorithm, model, batch_size, learning_rate, beta,
                            lamda, num_glob_iters, local_epochs, optimizer, numusers, K,
                            personal_learning_rate, i)
        elif algorithm == "PerAvg":
            server = PerAvg(dataset, algorithm, model, batch_size, learning_rate, beta,
                            lamda, num_glob_iters, local_epochs, optimizer, numusers, i)

        server.train()
        server.test()

    # Average the results logged across runs; personalized variants are stored
    # under a "_p" suffix
    if algorithm == "PerAvg":
        algorithm = "PerAvg_p"
    if algorithm == "pFedMe":
        average_data(num_users=numusers, loc_ep1=local_epochs, Numb_Glob_Iters=num_glob_iters,
                     lamb=lamda, learning_rate=learning_rate, beta=beta, algorithms="pFedMe_p",
                     batch_size=batch_size, dataset=dataset, k=K,
                     personal_learning_rate=personal_learning_rate, times=times)
    average_data(num_users=numusers, loc_ep1=local_epochs, Numb_Glob_Iters=num_glob_iters,
                 lamb=lamda, learning_rate=learning_rate, beta=beta, algorithms=algorithm,
                 batch_size=batch_size, dataset=dataset, k=K,
                 personal_learning_rate=personal_learning_rate, times=times)
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--dataset", type=str, default="Cifar10", choices=["Mnist", "Synthetic", "Cifar10"])
    parser.add_argument("--model", type=str, default="cnn", choices=["dnn", "mclr", "cnn"])
    parser.add_argument("--batch_size", type=int, default=20)
    parser.add_argument("--learning_rate", type=float, default=0.005, help="Local learning rate")
    parser.add_argument("--beta", type=float, default=1.0,
                        help="Moving-average parameter for pFedMe, or second learning rate of Per-FedAvg")
    parser.add_argument("--lamda", type=int, default=15, help="Regularization term")
    parser.add_argument("--num_global_iters", type=int, default=800)
    parser.add_argument("--local_epochs", type=int, default=20)
    parser.add_argument("--optimizer", type=str, default="SGD")
    parser.add_argument("--algorithm", type=str, default="pFedMe", choices=["pFedMe", "PerAvg", "FedAvg"])
    parser.add_argument("--numusers", type=int, default=20, help="Number of users per round")
    parser.add_argument("--K", type=int, default=5, help="Computation steps")
    parser.add_argument("--personal_learning_rate", type=float, default=0.09,
                        help="Personalized learning rate used to approximate theta with K steps")
    parser.add_argument("--times", type=int, default=5, help="Number of independent runs")
    args = parser.parse_args()

    print("=" * 80)
    print("Summary of training process:")
    print("Algorithm: {}".format(args.algorithm))
    print("Batch size: {}".format(args.batch_size))
    print("Learning rate: {}".format(args.learning_rate))
    print("Moving average beta: {}".format(args.beta))
    print("Subset of users: {}".format(args.numusers))
    print("Number of global rounds: {}".format(args.num_global_iters))
    print("Number of local rounds: {}".format(args.local_epochs))
    print("Dataset: {}".format(args.dataset))
    print("Local model: {}".format(args.model))
    print("=" * 80)

    main(
        dataset=args.dataset,
        algorithm=args.algorithm,
        model=args.model,
        batch_size=args.batch_size,
        learning_rate=args.learning_rate,
        beta=args.beta,
        lamda=args.lamda,
        num_glob_iters=args.num_global_iters,
        local_epochs=args.local_epochs,
        optimizer=args.optimizer,
        numusers=args.numusers,
        K=args.K,
        personal_learning_rate=args.personal_learning_rate,
        times=args.times,
    )
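
# Example invocation (illustrative values; all flags are defined in the parser above):
#   python main.py --dataset Mnist --model mclr --algorithm pFedMe \
#       --batch_size 20 --learning_rate 0.005 --personal_learning_rate 0.09 \
#       --num_global_iters 800 --local_epochs 20 --numusers 20 --times 5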