import math
from abc import ABC, abstractmethod
import torch
import torch.nn.functional as F
import torch.utils
from torch.distributions.multivariate_normal import MultivariateNormal
from fl4health.utils.dataset import TensorDataset
class SyntheticFedProxDataset(ABC):
def __init__(
self,
num_clients: int,
temperature: float = 1.0,
input_dim: int = 60,
output_dim: int = 10,
samples_per_client: int = 1000,
) -> None:
"""
Abstract base class to support synthetic dataset generation in the style of the original FedProx paper.
Paper link: https://arxiv.org/abs/1812.06127
Reference code: https://github.com/litian96/FedProx/tree/master/data/synthetic_1_1
        NOTE: In the implementations here, all clients receive the same number of samples. In the original
        FedProx setup, the number of samples per client is drawn from a power law.
Args:
num_clients (int): Number of datasets (one per client) to generate
temperature (float, optional): temperature used for the softmax mapping to labels. Defaults to 1.0.
input_dim (int, optional): dimension of the input features for the synthetic dataset. Default is as in the
FedProx paper. Defaults to 60.
output_dim (int, optional): dimension of the output labels for the synthetic dataset. These are one-hot
encoding labels. Default is as in the FedProx paper. Defaults to 10.
samples_per_client (int, optional): Number of samples to generate in each client's dataset.
Defaults to 1000.
"""
self.num_clients = num_clients
self.temperature = temperature
self.input_dim = input_dim
self.output_dim = output_dim
self.samples_per_client = samples_per_client
# Sigma in the FedProx paper
self.input_covariance = self.construct_covariance_matrix()
def construct_covariance_matrix(self) -> torch.Tensor:
"""
        This function generates the covariance matrix used in generating the input features. It is fixed across
        all datasets. It is a diagonal matrix with diagonal entries Sigma_{j, j} = j^{-1.2}, where j starts at 1
        in this notation. The matrix is of dimension input_dim x input_dim.
Returns:
torch.Tensor: Covariance matrix for generation of input features.
"""
sigma_diagonal = torch.zeros(self.input_dim)
for i in range(self.input_dim):
# indexing in the original implementation starts at 1, so i+1
sigma_diagonal[i] = math.pow((i + 1), -1.2)
return torch.diag(sigma_diagonal)
def generate(self) -> list[TensorDataset]:
"""
Based on the class parameters, generate a list of synthetic TensorDatasets, one for each client.
Returns:
list[TensorDataset]: Synthetic datasets for each client.
"""
client_tensors = self.generate_client_tensors()
assert (
len(client_tensors) == self.num_clients
), "The tensors returned by generate_client_tensors should have the same length as self.num_clients"
client_datasets = [TensorDataset(X, Y) for X, Y in client_tensors]
return client_datasets
@abstractmethod
def generate_client_tensors(self) -> list[tuple[torch.Tensor, torch.Tensor]]:
"""
        Method to be implemented by subclasses determining how the client tensors are generated. Each subclass
        uses the same affine mapping, but the parameters of that mapping are set up differently, as determined
        in this function.
Returns:
list[tuple[torch.Tensor, torch.Tensor]]: input and output tensors for each of the clients.
"""
pass
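    # NOTE: The affine softmax mapping shared by the subclasses is not included in this excerpt. What follows
    # is a hedged sketch of that mapping (the helper name and exact shapes are assumptions, not part of the
    # original code), following the FedProx construction y = argmax(softmax(Wx + b)), here with a temperature
    # applied to the softmax and the resulting label one-hot encoded.
    def map_inputs_to_outputs(self, x: torch.Tensor, w: torch.Tensor, b: torch.Tensor) -> torch.Tensor:
        """
        Sketch of the affine softmax mapping from input features to one-hot labels.
        Args:
            x (torch.Tensor): Input features of shape (samples_per_client, input_dim).
            w (torch.Tensor): Affine transformation weights of shape (output_dim, input_dim).
            b (torch.Tensor): Affine transformation bias of shape (output_dim, 1).
        Returns:
            torch.Tensor: One-hot encoded labels of shape (samples_per_client, output_dim).
        """
        # Raw affine outputs of shape (output_dim, samples_per_client)
        raw_outputs = torch.matmul(w, x.T) + b
        # Temperature-scaled softmax over the output dimension, then argmax to select each sample's label
        softmax_outputs = F.softmax(raw_outputs / self.temperature, dim=0)
        labels = torch.argmax(softmax_outputs, dim=0)
        return F.one_hot(labels, num_classes=self.output_dim).float()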
class SyntheticNonIidFedProxDataset(SyntheticFedProxDataset):
def __init__(
self,
num_clients: int,
alpha: float,
beta: float,
temperature: float = 1.0,
input_dim: int = 60,
output_dim: int = 10,
samples_per_client: int = 1000,
) -> None:
"""
        Non-IID synthetic dataset generator modeled after the implementation in the original FedProx paper. See
        Section 5.1 in the paper linked below for additional details. The non-IID generation code is modeled
        after the code housed in the GitHub repository linked below as well.
Paper link: https://arxiv.org/abs/1812.06127
Reference code: https://github.com/litian96/FedProx/tree/master/data/synthetic_1_1
NOTE: This generator ends up with fairly skewed labels in generation. That is, many of the clients will not
have representations of all the labels. This has been verified as also occurring in the reference code above
and is not a bug.
        The larger alpha and beta are, the more heterogeneous the clients' data is. The larger alpha is, the more
        "different" the affine transformations are from one another. The larger beta is, the larger the variance
        in the centers of the input features.
Args:
num_clients (int): Number of datasets (one per client) to generate
            alpha (float): Standard deviation of the centered normal distribution from which each client's mean
                (u_k) is drawn. The elements of the affine transformation components W and b are, in turn,
                generated from a normal distribution centered at u_k.
            beta (float): Standard deviation of the centered normal distribution from which each client's mean
                (B_k) is drawn. The elements of the multidimensional mean (v_k) are, in turn, generated from a
                normal distribution centered at B_k, and the input features are drawn as x ~ N(v_k, Sigma).
temperature (float, optional): temperature used for the softmax mapping to labels. Defaults to 1.0.
input_dim (int, optional): dimension of the input features for the synthetic dataset. Default is as in the
FedProx paper. Defaults to 60.
output_dim (int, optional): dimension of the output labels for the synthetic dataset. These are one-hot
encoding labels. Default is as in the FedProx paper. Defaults to 10.
samples_per_client (int, optional): Number of samples to generate in each client's dataset.
Defaults to 1000.
"""
super().__init__(
num_clients=num_clients,
temperature=temperature,
input_dim=input_dim,
output_dim=output_dim,
samples_per_client=samples_per_client,
)
self.alpha = alpha
self.beta = beta
def generate_client_tensors(self) -> list[tuple[torch.Tensor, torch.Tensor]]:
"""
        For the non-IID synthetic generator, this function uses the values of alpha and beta to sample the
        parameters that will be used to generate the synthetic datasets on each client. For each client, beta is
        used to sample a mean value from which the input features are generated, and alpha is used to sample a
        mean for the affine transformation components W and b. Note that sampling occurs for EACH client
        independently. The larger alpha and beta are, the larger the variance in these values, implying a higher
        degree of heterogeneity across clients.
Returns:
list[tuple[torch.Tensor, torch.Tensor]]: Set of input and output tensors for each client.
"""
tensors_per_client: list[tuple[torch.Tensor, torch.Tensor]] = []
for _ in range(self.num_clients):
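            # B_k in the FedProx paper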
B = torch.normal(0.0, self.beta, (1,))
# v_k in the FedProx paper
input_means = torch.normal(B, torch.ones(self.input_dim))
# u_k in the FedProx paper
affine_transform_means = torch.normal(0, self.alpha, (1,)).item()
client_X, client_Y = self.get_input_output_tensors(
affine_transform_means, input_means, self.input_covariance
)
tensors_per_client.append((client_X, client_Y))
return tensors_per_client
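    # NOTE: get_input_output_tensors is called above, but its body is not part of this excerpt. The sketch
    # below is a hedged reconstruction following the FedProx non-IID recipe: the elements of W_k and b_k are
    # drawn from N(u_k, 1), the inputs are drawn as x ~ N(v_k, Sigma), and the labels come from the shared
    # softmax mapping (see the map_inputs_to_outputs sketch in the base class).
    def get_input_output_tensors(
        self, affine_transform_means: float, input_means: torch.Tensor, input_covariance: torch.Tensor
    ) -> tuple[torch.Tensor, torch.Tensor]:
        """
        Sketch: sample the client-specific W_k and b_k, draw the input features, and map them to labels.
        Args:
            affine_transform_means (float): Mean (u_k) for the elements of W_k and b_k.
            input_means (torch.Tensor): Multidimensional mean (v_k) for the input features.
            input_covariance (torch.Tensor): Covariance matrix (Sigma) for the input features.
        Returns:
            tuple[torch.Tensor, torch.Tensor]: Input features of shape (samples_per_client, input_dim) and
            one-hot labels of shape (samples_per_client, output_dim).
        """
        # Elements of W_k and b_k are drawn from N(u_k, 1)
        w = torch.normal(affine_transform_means, torch.ones((self.output_dim, self.input_dim)))
        b = torch.normal(affine_transform_means, torch.ones(self.output_dim, 1))
        # Input features are drawn from N(v_k, Sigma)
        input_distribution = MultivariateNormal(loc=input_means, covariance_matrix=input_covariance)
        x = input_distribution.sample(torch.Size((self.samples_per_client,)))
        return x, self.map_inputs_to_outputs(x, w, b)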
class SyntheticIidFedProxDataset(SyntheticFedProxDataset):
def __init__(
self,
num_clients: int,
temperature: float = 1.0,
input_dim: int = 60,
output_dim: int = 10,
samples_per_client: int = 1000,
) -> None:
"""
        IID synthetic dataset generator modeled after the implementation in the original FedProx paper. See
        Appendix C.1 in the paper linked below for additional details. The IID generation code is based strictly
        on the description in the appendix for IID dataset generation.
        Paper link: https://arxiv.org/abs/1812.06127
        NOTE: This generator ends up with fairly skewed labels in generation. That is, many of the clients will
        not have representations of all the labels. This has been verified as also occurring in the FedProx
        reference code and is not a bug.
Args:
num_clients (int): Number of datasets (one per client) to generate
temperature (float, optional): temperature used for the softmax mapping to labels. Defaults to 1.0.
input_dim (int, optional): dimension of the input features for the synthetic dataset. Default is as in the
FedProx paper. Defaults to 60.
output_dim (int, optional): dimension of the output labels for the synthetic dataset. These are one-hot
encoding labels. Default is as in the FedProx paper. Defaults to 10.
samples_per_client (int, optional): Number of samples to generate in each client's dataset.
Defaults to 1000.
"""
super().__init__(
num_clients=num_clients,
temperature=temperature,
input_dim=input_dim,
output_dim=output_dim,
samples_per_client=samples_per_client,
)
# As described in the original paper, the affine transformation is SHARED by all clients and the elements
# of W and b are sampled from standard normal distributions.
self.W = torch.normal(0, torch.ones((self.output_dim, self.input_dim)))
self.b = torch.normal(0, torch.ones(self.output_dim, 1))
        # Similarly, the input features across clients are all sampled from a centered multidimensional normal
        # distribution with diagonal covariance matrix Sigma (see the base class for its definition).
self.input_multivariate_normal = MultivariateNormal(
loc=torch.zeros(self.input_dim), covariance_matrix=self.input_covariance
)
def generate_client_tensors(self) -> list[tuple[torch.Tensor, torch.Tensor]]:
"""
        For IID generation, this function is simple: we need not sample any parameters per client, since the
        generation parameters are all shared across clients.
Returns:
list[tuple[torch.Tensor, torch.Tensor]]: Set of input and output tensors for each client.
"""
tensors_per_client: list[tuple[torch.Tensor, torch.Tensor]] = []
for _ in range(self.num_clients):
client_X, client_Y = self.get_input_output_tensors()
tensors_per_client.append((client_X, client_Y))
return tensors_per_client
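    # NOTE: As in the non-IID class, get_input_output_tensors is referenced but not included in this excerpt.
    # A hedged sketch under the same assumptions: inputs are drawn from the shared centered multivariate
    # normal, and labels come from the shared affine transformation W, b through the softmax mapping sketched
    # in the base class.
    def get_input_output_tensors(self) -> tuple[torch.Tensor, torch.Tensor]:
        """
        Sketch: draw the input features from the shared distribution and map them to labels with the shared
        affine transformation.
        Returns:
            tuple[torch.Tensor, torch.Tensor]: Input features of shape (samples_per_client, input_dim) and
            one-hot labels of shape (samples_per_client, output_dim).
        """
        x = self.input_multivariate_normal.sample(torch.Size((self.samples_per_client,)))
        return x, self.map_inputs_to_outputs(x, self.W, self.b)


# Hypothetical usage sketch (not part of the original module): generate one synthetic dataset per client and
# check that the expected number of datasets is produced.
if __name__ == "__main__":
    iid_generator = SyntheticIidFedProxDataset(num_clients=5)
    client_datasets = iid_generator.generate()
    assert len(client_datasets) == 5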