Add a YAML based file format for pipelines #86

Merged (28 commits) on Jul 13, 2024
Commits (28 total; the file diffs below show the changes from 11 of them)
0435abb  Replace LLMBlock model_prompt param with model_family (markmc, Jul 5, 2024)
49c87d5  Add a PipelineContext class (markmc, Jul 2, 2024)
7cfbaa9  Fix multiprocessing issues in FilterByValueBlock (markmc, Jul 10, 2024)
9d92548  Fix multiprocessing issues in utilblocks (markmc, Jul 11, 2024)
23dd08e  Allow block_config.config_path to be relative (markmc, Jul 11, 2024)
9fc272c  Fix block_name handling (markmc, Jul 11, 2024)
8cb673b  Move FilterByValue multiprocessing config to PipelineContext (markmc, Jul 11, 2024)
b956643  Add `add_num_samples` to LLMBlock config (markmc, Jul 8, 2024)
18f1513  Fix LLMBlock batch_kwargs constructor param (markmc, Jul 12, 2024)
82aadd9  Remove batch_kwargs (markmc, Jul 12, 2024)
07c1c6d  Add a YAML based file format for pipelines (markmc, Jul 5, 2024)
003c8e8  Merge mmlu_bench block into synth_knowledge pipeline (markmc, Jul 12, 2024)
ab46552  Rename Pipeline.from_flows() to Pipeline.from_file() (markmc, Jul 12, 2024)
beabbf3  Move pipeline configs into a new directory structure (markmc, Jul 12, 2024)
ec94159  YAML format improvement - move block_name up (markmc, Jul 12, 2024)
2d92cf6  YAML format improvement - remove block_ prefix (markmc, Jul 12, 2024)
a0c9b80  Make "full" and "simple" aliases to a directory of pipeline configs (markmc, Jul 12, 2024)
eb2719f  YAML format improvement - change block_configs to blocks (markmc, Jul 12, 2024)
46f16c6  Add ImportBlock to allow extending existing pipelines (markmc, Jul 12, 2024)
82adb4a  generate_data: Allow pipeline arg to be a path to a directory (russellb, Jul 12, 2024)
5a0b7a6  llm: Set `n` by default in gen_kwargs (russellb, Jul 12, 2024)
7c5c1c3  pipelines: Add missing drop_duplicates for context in grounded skills (russellb, Jul 12, 2024)
04f7baa  filterblock: Document block behavior in more detail (russellb, Jul 12, 2024)
b8768ac  Undo changes to how `n` parameter is handled (russellb, Jul 12, 2024)
88f5003  Re-instate batch_kwargs.num_samples (markmc, Jul 12, 2024)
804ee3a  Interpret llmblock.config_path relative to the pipeline config path (markmc, Jul 12, 2024)
d1c5d5b  Ensure num_proc is passed as a keyword arg to Dataset.map() (markmc, Jul 13, 2024)
2c52770  fix: use string instead of boolean in YAML for "YES" (xukai92, Jul 13, 2024)
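
Taken together, the later commits replace the hard-coded *Flow classes with pipeline definitions stored in YAML files: block_configs becomes blocks, each block's name moves to the top of its entry, and the block_ prefix is dropped. Pipeline.from_flows() is renamed to Pipeline.from_file(), generate_data() accepts a directory of such files, and "full"/"simple" act as aliases for built-in directories of pipeline configs. The sketch below illustrates what a definition and its loading code could look like; the exact YAML keys, the from_file() signature, the endpoint URL, and the paths are inferred from the commit messages above rather than copied from the merged code.

# Illustrative only -- the YAML keys and the from_file() signature are inferred
# from the commit messages in this PR, not copied from the merged code.
import pathlib
import textwrap

from openai import OpenAI

from src.instructlab.sdg.pipeline import Pipeline, PipelineContext

# A hypothetical pipeline definition in the new format: a top-level "blocks"
# list, with each block's name hoisted up and the "block_" prefix dropped.
PIPELINE_YAML = textwrap.dedent(
    """\
    blocks:
      - name: gen_skill_freeform          # was block_name
        type: LLMBlock                    # was block_type
        config:                           # was block_config
          config_path: configs/skills/freeform_questions.yaml  # may be relative
    """
)

pipeline_path = pathlib.Path("my_pipeline.yaml")
pipeline_path.write_text(PIPELINE_YAML, encoding="utf-8")

# An OpenAI-compatible client, e.g. a local vLLM server (URL and model assumed).
client = OpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")
ctx = PipelineContext(client, "mixtral", "teacher-model", 1)

pipe = Pipeline.from_file(ctx, str(pipeline_path))
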
12 changes: 8 additions & 4 deletions scripts/test_freeform_skills.py
@@ -4,8 +4,11 @@

 # First Party
 from src.instructlab.sdg import SDG
-from src.instructlab.sdg.default_flows import SynthSkillsFlow
-from src.instructlab.sdg.pipeline import Pipeline
+from src.instructlab.sdg.pipeline import (
+    SYNTH_FREEFORM_SKILLS_FLOW,
+    Pipeline,
+    PipelineContext,
+)

 # for vLLM endpoints, the api_key remains "EMPTY"
 openai_api_key = "EMPTY"
@@ -49,8 +52,9 @@

 ds = Dataset.from_list(samples)

-skills_flow = SynthSkillsFlow(client, "mixtral", teacher_model, 1).get_flow()
-skills_pipe = Pipeline(skills_flow)
+ctx = PipelineContext(client, "mixtral", teacher_model, 1)
+
+skills_pipe = Pipeline.from_flows(ctx, [SYNTH_FREEFORM_SKILLS_FLOW])

 sdg = SDG([skills_pipe])
 gen_data = sdg.generate(ds)
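
Since the hunks above show only fragments of scripts/test_freeform_skills.py, here is a minimal end-to-end sketch of the updated calling convention. The OpenAI client construction, endpoint URL, model name, and seed-sample fields are assumptions for illustration; the PipelineContext arguments simply mirror the positional call in the diff, and later commits in this PR rename from_flows() to from_file().

# A minimal sketch of the new calling convention shown in the diff above.
# Assumptions (not taken from this PR): the client constructor arguments, the
# endpoint URL, the model id, and the seed-sample field names.
from datasets import Dataset
from openai import OpenAI

from src.instructlab.sdg import SDG
from src.instructlab.sdg.pipeline import (
    SYNTH_FREEFORM_SKILLS_FLOW,
    Pipeline,
    PipelineContext,
)

client = OpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")

samples = [
    # Seed sample columns elided here; the real script defines them in full.
    {"task_description": "..."},  # illustrative field name only
]
ds = Dataset.from_list(samples)

# Positional args mirror the diff: client, model family, teacher model, count.
ctx = PipelineContext(client, "mixtral", "teacher-model", 1)

skills_pipe = Pipeline.from_flows(ctx, [SYNTH_FREEFORM_SKILLS_FLOW])
sdg = SDG([skills_pipe])
gen_data = sdg.generate(ds)
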
12 changes: 8 additions & 4 deletions scripts/test_grounded_skills.py
@@ -4,8 +4,11 @@

 # First Party
 from src.instructlab.sdg import SDG
-from src.instructlab.sdg.default_flows import SynthGroundedSkillsFlow
-from src.instructlab.sdg.pipeline import Pipeline
+from src.instructlab.sdg.pipeline import (
+    SYNTH_GROUNDED_SKILLS_FLOW,
+    Pipeline,
+    PipelineContext,
+)

 # for vLLM endpoints, the api_key remains "EMPTY"
 openai_api_key = "EMPTY"
@@ -97,8 +100,9 @@

 ds = Dataset.from_list(samples)

-skills_flow = SynthGroundedSkillsFlow(client, "mixtral", teacher_model, 10).get_flow()
-skills_pipe = Pipeline(skills_flow)
+ctx = PipelineContext(client, "mixtral", teacher_model, 10)
+
+skills_pipe = Pipeline.from_flows(ctx, [SYNTH_GROUNDED_SKILLS_FLOW])

 sdg = SDG([skills_pipe])
 gen_data = sdg.generate(ds)
17 changes: 10 additions & 7 deletions scripts/test_knowledge.py
@@ -7,8 +7,12 @@

 # First Party
 from src.instructlab.sdg import SDG
-from src.instructlab.sdg.default_flows import MMLUBenchFlow, SynthKnowledgeFlow
-from src.instructlab.sdg.pipeline import Pipeline
+from src.instructlab.sdg.pipeline import (
+    MMLU_BENCH_FLOW,
+    SYNTH_KNOWLEDGE_FLOW,
+    Pipeline,
+    PipelineContext,
+)

 # Please don't add you vLLM endpoint key here
 openai_api_key = "EMPTY"
@@ -38,12 +42,11 @@

 ds = Dataset.from_list(samples)

-mmlu_flow = MMLUBenchFlow(client, "mixtral", teacher_model, 1).get_flow()
-knowledge_flow = SynthKnowledgeFlow(client, "mixtral", teacher_model, 1).get_flow()
-knowledge_pipe = Pipeline(knowledge_flow)
-mmlu_pipe = Pipeline(mmlu_flow)
+ctx = PipelineContext(client, "mixtral", teacher_model, 1)
+
+knowledge_pipe = Pipeline.from_flows(ctx, [MMLU_BENCH_FLOW, SYNTH_KNOWLEDGE_FLOW])

-sdg = SDG([mmlu_pipe, knowledge_pipe])
+sdg = SDG([knowledge_pipe])
 mmlubench_data = sdg.generate(ds)

 print(mmlubench_data)
9 changes: 8 additions & 1 deletion src/instructlab/sdg/block.py
@@ -3,6 +3,7 @@
 from abc import ABC
 from collections import ChainMap
 from typing import Any, Dict, Union
+import os.path

 # Third Party
 import yaml
@@ -14,7 +15,8 @@


 class Block(ABC):
-    def __init__(self, block_name: str) -> None:
+    def __init__(self, ctx, block_name: str) -> None:
+        self.ctx = ctx
         self.block_name = block_name

     @staticmethod
@@ -41,8 +43,13 @@ def _load_config(self, config_path: str) -> Union[Dict[str, Any], None]:
         """
         Load the configuration file for this block.

+        If the supplied configuration file is a relative path, it is assumed
+        to be part of this Python package.
+
         :param config_path: The path to the configuration file.
         :return: The loaded configuration.
         """
+        if not os.path.isabs(config_path):
+            config_path = os.path.join(self.ctx.sdg_base, config_path)
         with open(config_path, "r", encoding="utf-8") as config_file:
             return yaml.safe_load(config_file)
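
The new _load_config() behavior is easy to see in isolation: relative paths are joined onto the context's sdg_base, absolute paths are opened as given. Below is a small sketch of that resolution rule; the stand-alone helper is hypothetical, written only to mirror the two lines added in the diff above, and the example paths are illustrative.

# A hypothetical stand-alone helper mirroring the path resolution added to
# Block._load_config() above; it is not part of the library.
import os.path


def resolve_config_path(sdg_base: str, config_path: str) -> str:
    """Return the path _load_config() would open for a given ctx.sdg_base."""
    if not os.path.isabs(config_path):
        # Relative paths are taken to live inside the installed SDG package.
        return os.path.join(sdg_base, config_path)
    return config_path


base = "/site-packages/instructlab/sdg"
# Relative paths resolve under the package base...
assert resolve_config_path(base, "configs/knowledge/mmlu.yaml") == (
    "/site-packages/instructlab/sdg/configs/knowledge/mmlu.yaml"
)
# ...while absolute paths are used unchanged.
assert resolve_config_path(base, "/tmp/my_block.yaml") == "/tmp/my_block.yaml"
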