Encrypted fine-tuning
Overview
Usage
1. Apply the peft LoRA layers
import torch
import torch.nn.functional as F
from torch import nn, optim
from peft import LoraConfig, get_peft_model
from concrete.ml.torch.lora import LoraTrainer
from concrete.ml.torch.hybrid_model import HybridFHEModel
from sklearn.datasets import make_circles
from torch.utils.data import DataLoader, TensorDataset
class SimpleMLP(nn.Module):
    """Two-layer perceptron with no LoRA adapters attached.

    Architecture: Linear -> ReLU -> Linear. The linear layers are named
    ``fc1`` and ``fc2`` so they can be targeted by a LoRA configuration.
    """

    def __init__(self, input_size=2, hidden_size=128, num_classes=2):
        super().__init__()
        # Input projection up to the hidden width.
        self.fc1 = nn.Linear(input_size, hidden_size)
        self.relu = nn.ReLU()
        # Hidden layer down to one logit per class.
        self.fc2 = nn.Linear(hidden_size, num_classes)

    def forward(self, x, labels=None):
        """Return class logits for a batch of inputs.

        ``labels`` is accepted for trainer-style call compatibility but
        is not used in the computation.
        """
        return self.fc2(self.relu(self.fc1(x)))
# Create an initial (plain PyTorch) model to be adapted.
model = SimpleMLP()

# Apply LoRA configuration: rank-1 adapters on both linear layers.
# NOTE(review): r=1 / lora_alpha=1 keep the adapter minimal for this demo.
lora_config = LoraConfig(
    r=1,
    lora_alpha=1,
    lora_dropout=0.01,
    target_modules=["fc1", "fc2"],  # adapt SimpleMLP's two Linear layers
    bias="none"
)
peft_model = get_peft_model(model, lora_config)

# Generate a second data-set for demonstration purposes:
# 32 points on two concentric noisy circles, with binary labels.
X_task2, y_task2 = make_circles(n_samples=32, noise=0.2, factor=0.5)
train_loader_task2 = DataLoader(
    TensorDataset(torch.Tensor(X_task2), torch.LongTensor(y_task2)),
    batch_size=32,  # the whole 32-sample set fits in a single batch
    shuffle=True
)
2. Convert the LoRA model to use custom Concrete ML layers
3. Compile a hybrid FHE model for the LoRA adapted PyTorch model
4. Train the model on private data
Additional options
Inference
Toggle LoRA layers
Last updated
Was this helpful?