fix test
StanChan03 committed Dec 27, 2024
1 parent c966d65 commit 20d7dea
Showing 1 changed file with 38 additions and 17 deletions.
55 changes: 38 additions & 17 deletions .github/tests/lm_tests.py
@@ -483,15 +483,25 @@ def test_operator_cache(setup_models, model):
    lm = LM(model="gpt-4o-mini", cache=cache)
    lotus.settings.configure(lm=lm, enable_message_cache=True, enable_operator_cache=True)

-    batch = [
-        [{"role": "user", "content": "Hello, world!"}],
-        [{"role": "user", "content": "What is the capital of France?"}],
-    ]
-    first_responses = lm(batch).outputs
+    data = {
+        "Course Name": [
+            "Dynamics and Control of Chemical Processes",
+            "Optimization Methods in Engineering",
+            "Chemical Kinetics and Catalysis",
+            "Transport Phenomena and Separations",
+        ]
+    }
+
+    df = pd.DataFrame(data)
+    user_instruction = "What is a similar course to {Course Name}? Be concise."
+
+    first_response = df.sem_map(user_instruction)
    assert lm.stats.total_usage.operator_cache_hits == 0
-    second_responses = lm(batch).outputs
-    assert lm.stats.total_usage.operator_cache_hits == 2
-    assert first_responses == second_responses
+
+    second_response = df.sem_map(user_instruction)
+    assert lm.stats.total_usage.operator_cache_hits == 1
+
+    assert first_response.equals(second_response)


@pytest.mark.parametrize("model", get_enabled("gpt-4o-mini"))
@@ -502,19 +512,30 @@ def test_disable_operator_cache(setup_models, model):
    lm = LM(model="gpt-4o-mini", cache=cache)
    lotus.settings.configure(lm=lm, enable_message_cache=True, enable_operator_cache=False)

-    batch = [
-        [{"role": "user", "content": "Hello, world!"}],
-        [{"role": "user", "content": "What is the capital of France?"}],
-    ]
-    lm(batch)
+    data = {
+        "Course Name": [
+            "Dynamics and Control of Chemical Processes",
+            "Optimization Methods in Engineering",
+            "Chemical Kinetics and Catalysis",
+            "Transport Phenomena and Separations",
+        ]
+    }
+
+    df = pd.DataFrame(data)
+    user_instruction = "What is a similar course to {Course Name}? Be concise."
+
+    first_response = df.sem_map(user_instruction)
    assert lm.stats.total_usage.operator_cache_hits == 0
-    lm(batch)
+
+    second_response = df.sem_map(user_instruction)
    assert lm.stats.total_usage.operator_cache_hits == 0
+
+    assert first_response.equals(second_response)

    # Now enable operator cache. Note that the first batch is not cached.
    lotus.settings.configure(enable_operator_cache=True)
-    first_responses = lm(batch).outputs
+    first_responses = df.sem_map(user_instruction)
    assert lm.stats.total_usage.operator_cache_hits == 0
-    second_responses = lm(batch).outputs
-    assert lm.stats.total_usage.operator_cache_hits == 2
+    second_responses = df.sem_map(user_instruction)
+    assert lm.stats.total_usage.operator_cache_hits == 1
    assert first_responses.equals(second_responses)
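
For a quick way to see the cache behavior these tests assert on, here is a minimal sketch of the same flow outside pytest. It assumes an OpenAI API key is available in the environment and that LM can be constructed without the explicit cache object the tests build in their fixture; the expected hit count of 1 simply mirrors the assertions above.

import pandas as pd

import lotus
from lotus.models import LM

# Assumption: LM works without an explicit cache argument here; the tests
# above pass a cache object created elsewhere in the test module.
lm = LM(model="gpt-4o-mini")
lotus.settings.configure(lm=lm, enable_message_cache=True, enable_operator_cache=True)

df = pd.DataFrame({"Course Name": ["Optimization Methods in Engineering"]})
user_instruction = "What is a similar course to {Course Name}? Be concise."

first = df.sem_map(user_instruction)   # first call populates the operator cache
second = df.sem_map(user_instruction)  # identical repeat call should be served from the cache

# Mirrors the test assertion: one operator cache hit after the repeated call.
print(lm.stats.total_usage.operator_cache_hits)

Under the same assumptions, running pytest .github/tests/lm_tests.py -k operator_cache exercises this logic with the fixture-provided cache instead.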
