Encrypted fine-tuning
Overview
Usage
1. Apply the peft LoRA layers
import torch
from torch import nn, optim
from peft import LoraConfig, get_peft_model
from concrete.ml.torch.lora import LoraTraining, get_remote_names
from concrete.ml.torch.hybrid_model import HybridFHEModel
from sklearn.datasets import make_circles
from torch.utils.data import DataLoader, TensorDataset
class SimpleMLP(nn.Module):
    """A minimal two-layer perceptron, defined without any LoRA layers.

    The linear layers are deliberately named ``fc1`` and ``fc2`` so they can
    be targeted by name in a ``LoraConfig`` (``target_modules=["fc1", "fc2"]``).
    """

    def __init__(self, input_size=2, hidden_size=128, num_classes=2):
        """Build the network.

        Args:
            input_size: Number of input features.
            hidden_size: Width of the hidden layer.
            num_classes: Number of output logits.
        """
        super().__init__()
        self.fc1 = nn.Linear(input_size, hidden_size)
        self.relu = nn.ReLU()
        self.fc2 = nn.Linear(hidden_size, num_classes)

    def forward(self, x):
        """Return class logits for a batch of inputs ``x``."""
        # linear -> ReLU -> linear, expressed as one chained call
        return self.fc2(self.relu(self.fc1(x)))
# LoRA configuration targeting the two linear layers of SimpleMLP by name.
lora_config = LoraConfig(
    r=1,
    lora_alpha=1,
    lora_dropout=0.01,
    target_modules=["fc1", "fc2"],
    bias="none",
)

model = SimpleMLP()

# The initial training loop of the model should be
# added at this point on an initial data-set

# A second data-set, task2, is generated
X_task2, y_task2 = make_circles(n_samples=32, noise=0.2, factor=0.5)

# Wrap the task2 data in tensors and build a shuffled loader over it.
task2_features = torch.Tensor(X_task2)
task2_labels = torch.LongTensor(y_task2)
train_loader_task2 = DataLoader(
    TensorDataset(task2_features, task2_labels),
    batch_size=32,
    shuffle=True,
)

# Apply LoRA to the model
peft_model = get_peft_model(model, lora_config)
2. Convert the LoRA model to use custom Concrete ML layers
3. Compile a hybrid FHE model for the LoRA adapted PyTorch model
4. Train the model on private data
Additional options
Inference
Toggle LoRA layers
Last updated
Was this helpful?