# Standard
import json
import os

# Third Party
from datasets import Dataset
import pytest

# First Party
from instructlab.sdg.checkpointing import Checkpointer


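# Minimal stand-in for a pipeline step: derives a "bar" column from "foo"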
def _add_bar(sample, add_value=100):
    sample["bar"] = sample["foo"] + add_value
    return sample


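# Write checkpoints_count files of 10 samples each directly to disk,
# mimicking the Checkpointer's data_checkpoint_*.jsonl layout; "abcde"
# presumably stands in for the unique suffix the Checkpointer appends to
# files it writes itself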
def _populate_checkpoints(tmpdir, dataset, checkpoints_count, remove_column):
    for i in range(checkpoints_count):
        checkpoint_dataset = dataset.select(range(i * 10, (i + 1) * 10))
        checkpoint_dataset = checkpoint_dataset.map(
            lambda x: _add_bar(x, add_value=100)
        )
        if remove_column:
            checkpoint_dataset = checkpoint_dataset.remove_columns("foo")
        checkpoint_dataset.to_json(
            os.path.join(tmpdir, f"data_checkpoint_abcde{i}.jsonl"),
            orient="records",
            lines=True,
        )


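# Check the checkpoint files on disk: their count, naming, per-file line
# counts and columns. expected_length may be a list when file sizes differ;
# it is matched order-independently, since directory listing order is not
# guaranteed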
def _validate_checkpoints(tmpdir, expected_files_count, expected_length, remove_column):
    saved_files = os.listdir(tmpdir)
    assert len(saved_files) == expected_files_count
    assert all(f.startswith("data_checkpoint_") for f in saved_files)
    assert all(f.endswith(".jsonl") for f in saved_files)

    if isinstance(expected_length, list):
        # Copy so we don't mutate the caller's parametrized list
        expected_length = list(expected_length)

    for file_name in saved_files:
        with open(os.path.join(tmpdir, file_name), "r") as f:
            lines = list(f)
            if isinstance(expected_length, list):
                expected_length.remove(len(lines))
            else:
                assert len(lines) == expected_length
            for line in lines:
                data = json.loads(line)
                if remove_column:
                    assert "foo" not in data and "bar" in data
                else:
                    assert "foo" in data and "bar" in data


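# Each case gives: save_freq, remove_column, dataset_size, the number of
# checkpoints pre-populated on disk, the number of 10-sample splits processed
# afterwards, the expected final checkpoint file count, and the expected
# per-file lengths. In the save_freq=3 case the two pre-populated files hold
# 10 samples each, and the 8 splits are written as 30 + 30 + 20 (the trailing
# 20 flushed by done())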
@pytest.mark.parametrize(
    "save_freq, remove_column, dataset_size, init_checkpoints, splits, final_checkpoints, checkpoint_length",
    [
        (1, False, 10, 0, 0, 1, 10),
        (1, True, 10, 0, 0, 1, 10),
        (1, False, 100, 1, 9, 10, 10),
        (1, True, 100, 1, 9, 10, 10),
        (1, False, 100, 2, 8, 10, 10),
        (3, False, 100, 2, 8, 5, [10, 10, 30, 30, 20]),
    ],
)
def test_checkpointing(
    tmpdir,
    save_freq,
    remove_column,
    dataset_size,
    init_checkpoints,
    splits,
    final_checkpoints,
    checkpoint_length,
):
    # Our initial dataset
    dataset = Dataset.from_list([{"idx": i, "foo": i} for i in range(dataset_size)])

    # Generate and save some checkpoints to disk
    _populate_checkpoints(tmpdir, dataset, init_checkpoints, remove_column)

    # Load the checkpoints, giving us the remaining dataset to process and
    # the generated data recovered from the checkpoints
    checkpointer = Checkpointer(checkpoint_dir=tmpdir, save_freq=save_freq)
    dataset, pre_generated_data = checkpointer.load(dataset)

    # The original column should still be present in the remaining dataset,
    # even if it was removed from the checkpoints (remove_column=True)
    assert "foo" in dataset.features

    # When save_freq > 1, checkpoints have different lengths and
    # checkpoint_length is a list whose first init_checkpoints entries are
    # the sizes of the pre-populated files
    if isinstance(checkpoint_length, list):
        checkpoints_total = sum(checkpoint_length[:init_checkpoints])
    else:
        checkpoints_total = checkpoint_length * init_checkpoints

    # Validate the remaining dataset and, when checkpoints were populated,
    # the pre-generated data loaded from them
    assert len(dataset) == (dataset_size - checkpoints_total)
    if init_checkpoints > 0:
        assert len(pre_generated_data) == checkpoints_total

    # Apply the pipeline to the remaining dataset and save checkpoints
    if splits:
        for i in range(splits):
            split = dataset.select(range(i * 10, (i + 1) * 10))
            split = split.map(lambda x: _add_bar(x, add_value=100))
            if remove_column:
                split = split.remove_columns("foo")
            checkpointer.checkpoint(split)
    else:
        dataset = dataset.map(lambda x: _add_bar(x, add_value=10))
        if remove_column:
            dataset = dataset.remove_columns("foo")
        checkpointer.checkpoint(dataset)

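    # done() flushes anything still buffered due to save_freq (e.g. the
    # trailing 20 samples in the save_freq=3 case) before we inspect the disk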
    checkpointer.done()

    # Validate that all checkpoints are now saved to disk
    _validate_checkpoints(tmpdir, final_checkpoints, checkpoint_length, remove_column)