fix: enwik dataset fix

Robin Meersman 2025-11-28 09:27:37 +01:00
parent fe207962de
commit 0577eee601
2 changed files with 34 additions and 12 deletions

@@ -1,25 +1,43 @@
 from datasets import load_dataset
 from torch.utils.data import Dataset
 import torch
 from os.path import curdir, join
 from .Dataset import Dataset
 from torch.utils.data import TensorDataset
 from typing import Callable


 class EnWik9DataSet(Dataset):
-    def __init__(self, root: str = "data", transform: Callable = None):
-        super().__init__(root, transform)
+    def __init__(self, root: str = "data", transform: Callable | None = None):
+        super().__init__()
+        self.transform = transform
+        # HuggingFace dataset: string text
         path = join(curdir, root)
         self._root = path
         data = load_dataset("haukur/enwik9", cache_dir=path, split="train")
+        # Extract raw text
         text = data["text"]
-        self.dataset = TensorDataset(text)
+        # Convert text (Python string) → bytes → tensor of ints 0–255
+        # (UTF-8 bytes are always in 0–255; errors="replace" only guards
+        # against code points that cannot be encoded, e.g. lone surrogates)
+        byte_data = "".join(text).encode("utf-8", errors="replace")
+        self.data = torch.tensor(list(byte_data), dtype=torch.long)
+        # The model uses a fixed context length of 128 bytes
+        self.context_length = 128

     def __len__(self):
-        return len(self.dataset)
+        # number of sliding windows over the byte stream
+        return len(self.data) - self.context_length

     def __getitem__(self, idx):
-        if self.transform is not None:
-            return self.transform(self.dataset[idx])
-        return self.dataset[idx]
+        # context window
+        x = self.data[idx : idx + self.context_length]
+        # next byte is the prediction target
+        y = self.data[idx + self.context_length]
+        if self.transform:
+            x = self.transform(x)
+        return x, y
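
For context on the new indexing: with N bytes and a context length of 128, valid window starts run from 0 through N − 129, so the dataset yields len(data) − 128 samples, each paired with the byte that follows its window. A minimal usage sketch, assuming the class is importable from the project's data package (the import path below is hypothetical):

    import torch
    from torch.utils.data import DataLoader

    from data.EnWik9DataSet import EnWik9DataSet  # hypothetical import path

    dataset = EnWik9DataSet(root="data")
    loader = DataLoader(dataset, batch_size=32, shuffle=True)

    x, y = next(iter(loader))
    assert x.shape == (32, 128) and x.dtype == torch.long  # byte-value contexts
    assert y.shape == (32,)                                # next-byte targets

Note that consecutive windows overlap with stride 1, a common setup for byte-level language modelling.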