Add a YAML based file format for pipelines #86

Merged: 28 commits, Jul 13, 2024
Commits
0435abb  Replace LLMBlock model_prompt param with model_family  (markmc, Jul 5, 2024)
49c87d5  Add a PipelineContext class  (markmc, Jul 2, 2024)
7cfbaa9  Fix multiprocessing issues in FilterByValueBlock  (markmc, Jul 10, 2024)
9d92548  Fix multiprocessing issues in utilblocks  (markmc, Jul 11, 2024)
23dd08e  Allow block_config.config_path to be relative  (markmc, Jul 11, 2024)
9fc272c  Fix block_name handling  (markmc, Jul 11, 2024)
8cb673b  Move FilterByValue multiprocessing config to PipelineContext  (markmc, Jul 11, 2024)
b956643  Add `add_num_samples` to LLMBlock config  (markmc, Jul 8, 2024)
18f1513  Fix LLMBlock batch_kwargs constructor param  (markmc, Jul 12, 2024)
82aadd9  Remove batch_kwargs  (markmc, Jul 12, 2024)
07c1c6d  Add a YAML based file format for pipelines  (markmc, Jul 5, 2024)
003c8e8  Merge mmlu_bench block into synth_knowledge pipeline  (markmc, Jul 12, 2024)
ab46552  Rename Pipeline.from_flows() to Pipeline.from_file()  (markmc, Jul 12, 2024)
beabbf3  Move pipeline configs into a new directory structure  (markmc, Jul 12, 2024)
ec94159  YAML format improvement - move block_name up  (markmc, Jul 12, 2024)
2d92cf6  YAML format improvement - remove block_ prefix  (markmc, Jul 12, 2024)
a0c9b80  Make "full" and "simple" aliases to a directory of pipeline configs  (markmc, Jul 12, 2024)
eb2719f  YAML format improvement - change block_configs to blocks  (markmc, Jul 12, 2024)
46f16c6  Add ImportBlock to allow extending existing pipelines  (markmc, Jul 12, 2024)
82adb4a  generate_data: Allow pipeline arg to be a path to a directory  (russellb, Jul 12, 2024)
5a0b7a6  llm: Set `n` by default in gen_kwargs  (russellb, Jul 12, 2024)
7c5c1c3  pipelines: Add missing drop_duplicates for context in grounded skills  (russellb, Jul 12, 2024)
04f7baa  filterblock: Document block behavior in more detail  (russellb, Jul 12, 2024)
b8768ac  Undo changes to how `n` parameter is handled  (russellb, Jul 12, 2024)
88f5003  Re-instate batch_kwargs.num_samples  (markmc, Jul 12, 2024)
804ee3a  Interpret llmblock.config_path relative to the pipeline config path  (markmc, Jul 12, 2024)
d1c5d5b  Ensure num_proc is passed as a keyword arg to Dataset.map()  (markmc, Jul 13, 2024)
2c52770  fix: use string instead of boolean in YAML for "YES"  (xukai92, Jul 13, 2024)
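
Several of the commits above reshape the file format itself: the `block_` prefix is dropped, `block_configs` becomes `blocks`, and each block's name moves up alongside its type. The resulting pipeline file looks roughly like the sketch below; the block names and config values here are made up for illustration, while the overall structure matches the YAML exercised in the tests further down.

```yaml
# Illustrative pipeline config only; block names and values are hypothetical.
version: "1.0"
blocks:
  - name: filter_low_scores
    type: FilterByValueBlock
    config:
      filter_column: score
      filter_value: 2
      operation: gt
      convert_dtype: int
  - name: gen_knowledge
    type: LLMBlock
    config:
      config_path: configs/knowledge/generate.yaml  # hypothetical path, resolved relative to this file
```

A file like this is loaded with `Pipeline.from_file()`, and the new `ImportBlock` (below) lets one pipeline pull in the blocks of another.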
src/instructlab/sdg/importblock.py (34 additions, 0 deletions)
@@ -0,0 +1,34 @@
# SPDX-License-Identifier: Apache-2.0
# Third Party
from datasets import Dataset

# Local
from . import pipeline
from .block import Block
from .logger_config import setup_logger

logger = setup_logger(__name__)


class ImportBlock(Block):
    def __init__(
        self,
        ctx,
        block_name,
        path,
    ) -> None:
        """
        ImportBlock imports a chain of blocks from another pipeline config file.

        Parameters:
        - ctx (PipelineContext): A PipelineContext object containing runtime parameters.
        - block_name (str): An identifier for this block.
        - path (str): A path (absolute, or relative to the instructlab.sdg package) to a pipeline config file.
        """
        super().__init__(ctx, block_name)
        self.path = path
        self.pipeline = pipeline.Pipeline.from_file(self.ctx, self.path)

    def generate(self, samples) -> Dataset:
        logger.info(f"ImportBlock chaining to blocks from {self.path}")
        return self.pipeline.generate(samples)
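
In a pipeline config, this block is referenced by its registered type name, with `path` carried in the block's `config` section, exactly as the parent pipeline in the tests below does. A minimal sketch (the file name is a placeholder):

```yaml
# Sketch of an ImportBlock entry; "child_pipeline.yaml" is a hypothetical path.
version: "1.0"
blocks:
  - name: import_child
    type: ImportBlock
    config:
      path: child_pipeline.yaml  # absolute, or relative to the instructlab.sdg package
```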
src/instructlab/sdg/pipeline.py (2 additions, 1 deletion)
@@ -8,7 +8,7 @@
import yaml

# Local
from . import filterblock, llmblock, utilblocks
from . import filterblock, importblock, llmblock, utilblocks
from .logger_config import setup_logger

logger = setup_logger(__name__)
@@ -85,6 +85,7 @@ def generate(self, dataset) -> Dataset:
"CombineColumnsBlock": utilblocks.CombineColumnsBlock,
"ConditionalLLMBlock": llmblock.ConditionalLLMBlock,
"FilterByValueBlock": filterblock.FilterByValueBlock,
"ImportBlock": importblock.ImportBlock,
"LLMBlock": llmblock.LLMBlock,
"SamplePopulatorBlock": utilblocks.SamplePopulatorBlock,
"SelectorBlock": utilblocks.SelectorBlock,
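
The mapping here ties the `type` strings used in the YAML to block classes, with `ImportBlock` now registered alongside the existing blocks. The construction logic sits outside this hunk; as a rough, hypothetical sketch (not the actual Pipeline internals), a registry of this shape can drive block creation along the following lines, where `build_blocks` and the exact constructor call pattern are assumptions based on `ImportBlock.__init__`:

```python
# Hypothetical sketch: how a "type name" -> class registry can drive block
# construction from a parsed pipeline YAML. Not the actual Pipeline code.
import yaml


def build_blocks(ctx, pipeline_yaml_path, block_types):
    with open(pipeline_yaml_path, encoding="utf-8") as f:
        config = yaml.safe_load(f)
    blocks = []
    for entry in config["blocks"]:
        block_cls = block_types[entry["type"]]  # e.g. "ImportBlock" -> importblock.ImportBlock
        # Each block receives the shared PipelineContext, its name, and its config
        # expanded as keyword args (mirrors ImportBlock.__init__(ctx, block_name, path)).
        blocks.append(block_cls(ctx, entry["name"], **entry.get("config", {})))
    return blocks
```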
tests/test_importblock.py (103 additions, 0 deletions)
@@ -0,0 +1,103 @@
# Standard
from unittest.mock import MagicMock, patch
import os
import tempfile
import unittest

# Third Party
from datasets import Dataset, Features, Value

# First Party
from instructlab.sdg.importblock import ImportBlock
from instructlab.sdg.pipeline import Pipeline


class TestImportBlockWithMockPipeline(unittest.TestCase):
@patch("instructlab.sdg.pipeline.Pipeline")
def setUp(self, mock_pipeline):
self.ctx = MagicMock()
self.block_name = "test_block"
self.path = "/path/to/config"
self.mock_pipeline = mock_pipeline
self.import_block = ImportBlock(self.ctx, self.block_name, self.path)
self.dataset = Dataset.from_dict({})

def test_initialization(self):
self.assertEqual(self.import_block.block_name, self.block_name)
self.assertEqual(self.import_block.path, self.path)
self.mock_pipeline.from_file.assert_called_once_with(self.ctx, self.path)

def test_generate(self):
self.mock_pipeline.from_file.return_value.generate.return_value = self.dataset
samples = self.import_block.generate(self.dataset)
self.mock_pipeline.from_file.return_value.generate.assert_called_once_with(
samples
)
self.assertEqual(samples, self.dataset)


_CHILD_YAML = """\
version: "1.0"
blocks:
  - name: greater_than_thirty
    type: FilterByValueBlock
    config:
      filter_column: age
      filter_value: 30
      operation: gt
      convert_dtype: int
"""


_PARENT_YAML_FMT = """\
version: "1.0"
blocks:
  - name: forty_or_under
    type: FilterByValueBlock
    config:
      filter_column: age
      filter_value: 40
      operation: le
      convert_dtype: int
  - name: import_child
    type: ImportBlock
    config:
      path: %s
  - name: big_bdays
    type: FilterByValueBlock
    config:
      filter_column: age
      filter_value:
        - 30
        - 40
      operation: eq
      convert_dtype: int
"""


class TestImportBlockWithFilterByValue(unittest.TestCase):
    def setUp(self):
        self.ctx = MagicMock()
        self.ctx.num_procs = 1
        self.child_yaml = self._write_tmp_yaml(_CHILD_YAML)
        self.parent_yaml = self._write_tmp_yaml(_PARENT_YAML_FMT % self.child_yaml)
        self.dataset = Dataset.from_dict(
            {"age": ["25", "30", "35", "40", "45"]},
            features=Features({"age": Value("string")}),
        )

    def tearDown(self):
        os.remove(self.parent_yaml)
        os.remove(self.child_yaml)

    def _write_tmp_yaml(self, content):
        tmp_file = tempfile.NamedTemporaryFile(delete=False, mode="w", suffix=".yaml")
        tmp_file.write(content)
        tmp_file.close()
        return tmp_file.name

    def test_generate(self):
        pipeline = Pipeline.from_file(self.ctx, self.parent_yaml)
        filtered_dataset = pipeline.generate(self.dataset)
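        # Input ages 25, 30, 35, 40, 45: the parent's "le 40" filter keeps 25-40,
        # the imported child's "gt 30" filter keeps 35 and 40, and the final
        # "eq [30, 40]" filter leaves only the age-40 row.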
        self.assertEqual(len(filtered_dataset), 1)
        self.assertEqual(filtered_dataset["age"], [40])