Code Server Use Case
This guide walks you through fine-tuning a pre-trained BERT model on the GLUE MRPC (Microsoft Research Paraphrase Corpus) task inside a GPU-enabled Code Server container.
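Before installing anything, it is worth confirming that the container can actually see the GPU. Assuming the NVIDIA driver and container runtime are exposed to the Code Server container, the standard check is:

nvidia-smi

If the GPU is listed in the output, continue with the environment setup below.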
Set up a Python virtual environment and install the required packages:

sudo apt update && sudo apt install -y python3 python3-pip python3-venv git
python3 -m venv ~/venv
source ~/venv/bin/activate
pip install --upgrade pip
pip install torch torchvision torchaudio scikit-learn scipy --index-url https://download.pytorch.org/whl/cu121
pip install datasets evaluate accelerate

Clone the transformers repository, install it in editable mode, and change into the text-classification example directory:

cd /workspace
git clone https://github.com/huggingface/transformers.git
cd transformers
pip install -e .
cd /workspace/transformers/examples/pytorch/text-classification
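At this point you can optionally confirm that PyTorch was installed with CUDA support and that the editable transformers install is visible. A quick sanity check (run inside the activated virtual environment):

python3 -c "import torch, transformers; print(torch.__version__, transformers.__version__, torch.cuda.is_available())"

The last value should be True; if it prints False, training will run on the CPU instead of the GPU.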
Launch fine-tuning on MRPC with the run_glue.py example script:

python3 run_glue.py \
  --model_name_or_path bert-base-uncased \
  --task_name mrpc \
  --do_train \
  --do_eval \
  --per_device_train_batch_size 16 \
  --learning_rate 2e-5 \
  --num_train_epochs 3 \
  --output_dir /tmp/bert-finetuned \
  --overwrite_output_dir

Once training completes, create a test.py script that loads the fine-tuned model and runs inference on a sentence pair:

from transformers import BertTokenizer, BertForSequenceClassification
import torch
# Load fine-tuned model and tokenizer
model_path = "/tmp/bert-finetuned"
model = BertForSequenceClassification.from_pretrained(model_path)
tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
# Move model to GPU
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
model.eval()
# Prepare a test sentence pair (MRPC is a paraphrase-detection task on sentence pairs)
sentence1 = "This is a great example!"
sentence2 = "This is a demo for code server GPU Container!"
inputs = tokenizer(sentence1, sentence2, return_tensors="pt").to(device)
# Run inference
with torch.no_grad():
    outputs = model(**inputs)
    logits = outputs.logits
predicted_class = torch.argmax(logits, dim=1).item()
# Map class to label (MRPC uses 0/1)
label_map = {0: "not paraphrase", 1: "paraphrase"}
print(f"Sentence: {sentence1}")
print(f"Sentence: {sentence2}")
print(f"Predicted Class: {predicted_class} ({label_map[predicted_class]})") python3 test.py