diff --git a/.gitignore b/.gitignore index 132426bd..badeae4a 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ node_modules .env .env.* +!.env.example .DS_Store server.py **/.DS_Store @@ -12,5 +13,7 @@ courses.zip html.zip build/html/* tmp -__pycache__ -.venv \ No newline at end of file +*.pyc +__pycache__/ +.pytest_cache/ +.venv diff --git a/asciidoc/courses/genai-workshop-graphrag/modules/2-neo4j-graphrag/lessons/4-hybrid-retriever/reset.cypher b/asciidoc/courses/genai-workshop-graphrag/modules/2-neo4j-graphrag/lessons/4-hybrid-retriever/reset.cypher new file mode 100644 index 00000000..19fc4a13 --- /dev/null +++ b/asciidoc/courses/genai-workshop-graphrag/modules/2-neo4j-graphrag/lessons/4-hybrid-retriever/reset.cypher @@ -0,0 +1,3 @@ +CREATE FULLTEXT INDEX plotFulltext IF NOT EXISTS +FOR (m:Movie) +ON EACH [m.plot] \ No newline at end of file diff --git a/asciidoc/courses/genai-workshop-graphrag/modules/2-neo4j-graphrag/lessons/7-multimodal-graphrag/reset.cypher b/asciidoc/courses/genai-workshop-graphrag/modules/2-neo4j-graphrag/lessons/7-multimodal-graphrag/reset.cypher new file mode 100644 index 00000000..a31ad704 --- /dev/null +++ b/asciidoc/courses/genai-workshop-graphrag/modules/2-neo4j-graphrag/lessons/7-multimodal-graphrag/reset.cypher @@ -0,0 +1,12 @@ +LOAD CSV WITH HEADERS FROM "https://data.neo4j.com/rec-embed/movie-poster-embeddings-1k.csv" AS row +match (m:Movie {movieId:row.movieId}) +WITH row,m +CALL db.create.setNodeVectorProperty(m, 'posterEmbedding', apoc.convert.fromJsonList(row.posterEmbedding)); + +CREATE VECTOR INDEX moviePosters IF NOT EXISTS +FOR (m:Movie) +ON m.posterEmbedding +OPTIONS {indexConfig: { + `vector.dimensions`: 512, + `vector.similarity_function`: 'cosine' +}}; \ No newline at end of file diff --git a/asciidoc/courses/llm-chatbot-python/modules/1-project-setup/lessons/2-setup/lesson.adoc b/asciidoc/courses/llm-chatbot-python/modules/1-project-setup/lessons/2-setup/lesson.adoc index 204f13f0..fd34c4ea 100644 --- 
a/asciidoc/courses/llm-chatbot-python/modules/1-project-setup/lessons/2-setup/lesson.adoc +++ b/asciidoc/courses/llm-chatbot-python/modules/1-project-setup/lessons/2-setup/lesson.adoc @@ -133,7 +133,7 @@ The `write_message()` helper function has been link:{repository-blob}/main/utils .Write Message Helper Function [source,python] ---- -include::{repository-raw}/{branch}/utils.py[tag=write_message] +include::{repository-raw}/{branch}/solutions/utils.py[tag=write_message] ---- The function accepts two positional arguments - the `role` of the author, either `human` or `assistant`, and the message. diff --git a/asciidoc/courses/llm-fundamentals/.env.example b/asciidoc/courses/llm-fundamentals/.env.example new file mode 100644 index 00000000..abf6b815 --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/.env.example @@ -0,0 +1,5 @@ +OPENAI_API_KEY="sk-..." +NEO4J_URI="bolt://" +NEO4J_USERNAME="neo4j" +NEO4J_PASSWORD="" +LANGSMITH_API_KEY="lsv2_..." \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/conftest.py b/asciidoc/courses/llm-fundamentals/conftest.py new file mode 100644 index 00000000..a2553d27 --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/conftest.py @@ -0,0 +1,56 @@ +import builtins +import importlib +import io +import sys + +import pytest +from pytest import MonkeyPatch + +from dotenv import load_dotenv + +@pytest.fixture(autouse=True) +def load_env_vars(): + load_dotenv() + +class TestHelpers: + @staticmethod + def run_module(monkeypatch: MonkeyPatch, module_name: str, input_values: list[str] = [] ) -> str: + """ + Runs a module (.py file) to completion. + stdin input_values can be provided as a list of strings. + stdout output is returned as a string. 
+ """ + + def mocked_input(prompt: str = "", return_values: list[str] = input_values): + if len(return_values) == 0: + raise Exception("Test error - Ran out of input values") + return return_values.pop(0) + + mocked_stdout = io.StringIO() + + with monkeypatch.context() as m: + m.setattr(builtins, "input", mocked_input) + m.setattr(sys, "stdout", mocked_stdout) + + sys.modules.pop(module_name, None) + importlib.import_module(name=module_name, package="files") + + return mocked_stdout.getvalue().strip() + + @staticmethod + def run_cypher_file(graph, file_path): + with open(file_path, "r") as file: + cyphers = file.read() + result = [] + for cypher in cyphers.split(";"): + if cypher.strip() != "": + result.append(TestHelpers.run_cypher(graph, cypher)) + return result + + @staticmethod + def run_cypher(graph, cypher): + return graph.query(cypher) + +@pytest.fixture() +def test_helpers(): + return TestHelpers diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/1-langchain/code/example_applcation.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/1-langchain/code/example_applcation.py index a1450acb..b5572efc 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/1-langchain/code/example_applcation.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/1-langchain/code/example_applcation.py @@ -1,3 +1,5 @@ +import os + from langchain_openai import ChatOpenAI from langchain.agents import AgentExecutor, create_react_agent from langchain.tools import Tool @@ -11,12 +13,14 @@ SESSION_ID = str(uuid4()) print(f"Session ID: {SESSION_ID}") -llm = ChatOpenAI(openai_api_key="sk-...") +llm = ChatOpenAI( + openai_api_key=os.getenv("OPENAI_API_KEY") + ) graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein" + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) prompt = 
ChatPromptTemplate.from_messages( @@ -53,9 +57,8 @@ def get_memory(session_id): history_messages_key="chat_history", ) -while True: - q = input("> ") - +while (q := input("> ")) != "exit": + response = cypher_agent.invoke( { "input": q diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/1-langchain/code/test_example_applcation.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/1-langchain/code/test_example_applcation.py new file mode 100644 index 00000000..5ddfe0e4 --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/1-langchain/code/test_example_applcation.py @@ -0,0 +1,8 @@ +def test_example_applcation(test_helpers, monkeypatch): + + output = test_helpers.run_module(monkeypatch, "example_applcation", ["hello", "exit"]) + + # Test a response was received from the agent + # There is a output which looks like Session ID: #####\n[response from LLM]\n + assert len(output.split("\n")) == 2 + diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/llm.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/llm.py index 49be1313..5835ec58 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/llm.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/llm.py @@ -1,6 +1,7 @@ +import os from langchain_openai import OpenAI -llm = OpenAI(openai_api_key="sk-...") +llm = OpenAI(openai_api_key=os.getenv("OPENAI_API_KEY")) response = llm.invoke("What is Neo4j?") diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/llm_config.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/llm_config.py new file mode 100644 index 00000000..ba7de884 --- /dev/null +++ 
b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/llm_config.py @@ -0,0 +1,16 @@ +import os +from langchain_openai import OpenAI + +# tag::config[] +llm = OpenAI( + openai_api_key=os.getenv("OPENAI_API_KEY"), + model="gpt-3.5-turbo-instruct", + temperature=0 +) +# end::config[] + +response = llm.invoke("What is Neo4j?") + +print(response) + + diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/llm_prompt.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/llm_prompt.py index 8ea214a2..96fa0db9 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/llm_prompt.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/llm_prompt.py @@ -1,8 +1,12 @@ +import os from langchain_openai import OpenAI -from langchain.prompts import PromptTemplate +# tag::import[] +from langchain.prompts import PromptTemplate +# end::import[] -llm = OpenAI(openai_api_key="sk-...") +llm = OpenAI(openai_api_key=os.getenv("OPENAI_API_KEY")) +# tag::template[] +template = PromptTemplate(template=""" You are a cockney fruit and vegetable seller. Your role is to assist your customer with their fruit and vegetable needs. 
@@ -10,7 +14,10 @@ Tell me about the following fruit: {fruit} """, input_variables=["fruit"]) +# end::template[] +# tag::invoke[] response = llm.invoke(template.format(fruit="apple")) -print(response) \ No newline at end of file +print(response) +# end::invoke[] \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/test_llm.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/test_llm.py new file mode 100644 index 00000000..28bde27e --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/code/test_llm.py @@ -0,0 +1,17 @@ +def test_llm(test_helpers, monkeypatch): + + output = test_helpers.run_module(monkeypatch, "llm") + + assert output > "" + +def test_llm_prompt(test_helpers, monkeypatch): + + output = test_helpers.run_module(monkeypatch, "llm_prompt") + + assert output > "" + +def test_llm_config(test_helpers, monkeypatch): + + output = test_helpers.run_module(monkeypatch, "llm_config") + + assert output > "" diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/lesson.adoc b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/lesson.adoc index 0be1bbbf..9f9171f7 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/lesson.adoc +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2-initialising-the-llm/lesson.adoc @@ -60,18 +60,12 @@ Create a new Python program and copy this code into a new Python file. [source,python] ---- -from langchain_openai import OpenAI - -llm = OpenAI(openai_api_key="sk-...") - -response = llm.invoke("What is Neo4j?") - -print(response) +include::code/llm.py[tag=**] ---- [IMPORTANT] .OpenAI API Key -Remember to include your OpenAI API key in the `openai_api_key` parameter. 
+Create an environment variable called `OPENAI_API_KEY` and set it to your OpenAI API key. Review the program and note the following: @@ -109,24 +103,16 @@ Modify your program to use the prompt template: [source,python] ---- -from langchain.prompts import PromptTemplate +include::code/llm_prompt.py[tag=import] -template = PromptTemplate(template=""" -You are a cockney fruit and vegetable seller. -Your role is to assist your customer with their fruit and vegetable needs. -Respond using cockney rhyming slang. - -Tell me about the following fruit: {fruit} -""", input_variables=["fruit"]) +include::code/llm_prompt.py[tag=template] ---- Call the LLM, passing the formatted prompt template as the input: [source,python] ---- -response = llm.invoke(template.format(fruit="apple")) - -print(response) +include::code/llm_prompt.py[tag=invoke] ---- You use the `format` method to pass the parameters to the prompt e.g. `fruit="apple"`. The input variables will be validated when the prompt is formatted, and a `KeyError` will be raised if any variables are missing from the input. @@ -158,11 +144,7 @@ When you create the LLM, you can configure it with parameters such as the `tempe [source,python] ---- -llm = OpenAI( - openai_api_key="sk-...", - model="gpt-3.5-turbo-instruct", - temperature=0 -) +include::code/llm_config.py[tag=config] ---- When selecting a model, it is worth considering the quality of the output and the cost per token. There are several link:https://platform.openai.com/docs/models/overview[OpenAI models^] available, each with different characteristics. 
diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/llm_chain.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/llm_chain.py index d4d3c96c..6bcfa357 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/llm_chain.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/llm_chain.py @@ -1,7 +1,10 @@ +import os from langchain_openai import OpenAI from langchain.prompts import PromptTemplate -llm = OpenAI(openai_api_key="sk-...") +llm = OpenAI( + openai_api_key=os.getenv("OPENAI_API_KEY") + ) template = PromptTemplate.from_template(""" You are a cockney fruit and vegetable seller. diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/llm_chain_output.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/llm_chain_output.py index 0b7e774e..f0d32b46 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/llm_chain_output.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/llm_chain_output.py @@ -1,3 +1,4 @@ +import os from langchain_openai import OpenAI from langchain.prompts import PromptTemplate # tag::import[] @@ -5,7 +6,8 @@ # end::import[] llm = OpenAI( - openai_api_key="sk-...") + openai_api_key=os.getenv("OPENAI_API_KEY") + ) template = PromptTemplate.from_template(""" You are a cockney fruit and vegetable seller. 
diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/llm_chain_output_json.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/llm_chain_output_json.py index 10fabf99..986dcd3e 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/llm_chain_output_json.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/llm_chain_output_json.py @@ -1,3 +1,4 @@ +import os from langchain_openai import OpenAI from langchain.prompts import PromptTemplate # tag::import[] @@ -5,7 +6,8 @@ # end::import[] llm = OpenAI( - openai_api_key="sk-...") + openai_api_key=os.getenv("OPENAI_API_KEY") + ) template = PromptTemplate.from_template(""" You are a cockney fruit and vegetable seller. diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/test_llm_chain.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/test_llm_chain.py new file mode 100644 index 00000000..bc4a2802 --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/2.5-chains/code/test_llm_chain.py @@ -0,0 +1,11 @@ +def test_llm_chain(test_helpers, monkeypatch): + + assert test_helpers.run_module(monkeypatch, "llm_chain") > "" + +def test_llm_chain_output(test_helpers, monkeypatch): + + assert test_helpers.run_module(monkeypatch, "llm_chain_output") > "" + +def test_llm_chain_output_json(test_helpers, monkeypatch): + + assert test_helpers.run_module(monkeypatch, "llm_chain_output_json") > "" diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/chat-model-chain.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/chat-model-chain.py index 7f141953..28991cd4 100644 --- 
a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/chat-model-chain.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/chat-model-chain.py @@ -1,8 +1,11 @@ +import os from langchain_openai import ChatOpenAI from langchain_core.prompts import ChatPromptTemplate from langchain.schema import StrOutputParser -chat_llm = ChatOpenAI(openai_api_key="sk-...") +chat_llm = ChatOpenAI( + openai_api_key=os.getenv("OPENAI_API_KEY") +) prompt = ChatPromptTemplate.from_messages( [ diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/chat-model-context.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/chat-model-context.py index 78d77757..a6c92362 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/chat-model-context.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/chat-model-context.py @@ -1,8 +1,11 @@ +import os from langchain_openai import ChatOpenAI from langchain_core.prompts import ChatPromptTemplate from langchain.schema import StrOutputParser -chat_llm = ChatOpenAI(openai_api_key="sk-...") +chat_llm = ChatOpenAI( + openai_api_key=os.getenv("OPENAI_API_KEY") +) prompt = ChatPromptTemplate.from_messages( [ diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/chat-model.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/chat-model.py index d5da649d..936d010f 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/chat-model.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/chat-model.py @@ -1,11 +1,12 @@ # tag::import[] +import os from langchain_openai import ChatOpenAI from langchain_core.messages import 
HumanMessage, SystemMessage # end::import[] # tag::llm[] chat_llm = ChatOpenAI( - openai_api_key="sk-..." + openai_api_key=os.getenv("OPENAI_API_KEY") ) # end::llm[] diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/test_chat-model.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/test_chat-model.py new file mode 100644 index 00000000..5c4750df --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3-chat-models/code/test_chat-model.py @@ -0,0 +1,11 @@ +def test_chat_model(test_helpers, monkeypatch): + + assert test_helpers.run_module(monkeypatch, "chat-model") > "" + +def test_chat_model_chain(test_helpers, monkeypatch): + + assert test_helpers.run_module(monkeypatch, "chat-model-chain") > "" + +def test_chat_model_context(test_helpers, monkeypatch): + + assert test_helpers.run_module(monkeypatch, "chat-model-context") > "" \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.5-memory/code/chat-model-memory.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.5-memory/code/chat-model-memory.py index 1d2b372b..f4a0cbbd 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.5-memory/code/chat-model-memory.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.5-memory/code/chat-model-memory.py @@ -1,3 +1,4 @@ +import os from langchain_openai import ChatOpenAI # tag::import-messages[] from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder @@ -10,7 +11,9 @@ from langchain_core.runnables.history import RunnableWithMessageHistory # end::import-runnable[] -chat_llm = ChatOpenAI(openai_api_key="sk-...") +chat_llm = ChatOpenAI( + openai_api_key=os.getenv("OPENAI_API_KEY") +) # tag::prompt[] prompt = ChatPromptTemplate.from_messages( @@ -55,8 +58,7 @@ def get_memory(session_id): 
}""" # tag::loop[] -while True: - question = input("> ") +while (question := input("> ")) != "exit": response = chat_with_message_history.invoke( { diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.5-memory/code/test_chat-model-memory.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.5-memory/code/test_chat-model-memory.py new file mode 100644 index 00000000..070e8228 --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.5-memory/code/test_chat-model-memory.py @@ -0,0 +1,7 @@ +def test_chat_model_memory(test_helpers, monkeypatch): + + assert test_helpers.run_module( + monkeypatch, + "chat-model-memory", + ["Whats happening at Fistral?", "exit"] + ) > "" \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/chat-model-memory-neo4j.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/chat-model-memory-neo4j.py index 75e037d6..2174ba04 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/chat-model-memory-neo4j.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/chat-model-memory-neo4j.py @@ -1,3 +1,4 @@ +import os from langchain_openai import ChatOpenAI from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder from langchain.schema import StrOutputParser @@ -15,13 +16,15 @@ print(f"Session ID: {SESSION_ID}") # end::session-id[] -chat_llm = ChatOpenAI(openai_api_key="sk-...") +chat_llm = ChatOpenAI( + openai_api_key=os.getenv("OPENAI_API_KEY") +) # tag::neo4j-graph[] graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein" + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) # end::neo4j-graph[] @@ -60,9 +63,8 @@ def 
get_memory(session_id): ] }""" -while True: - question = input("> ") - +while (question := input("> ")) != "exit": + # tag::invoke[] response = chat_with_message_history.invoke( { diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/connect-to-neo4j.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/connect-to-neo4j.py index 7a47cdb6..c004b030 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/connect-to-neo4j.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/connect-to-neo4j.py @@ -1,9 +1,10 @@ +import os from langchain_neo4j import Neo4jGraph graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein" + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) result = graph.query(""" diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/test_chat-model-memory-neo4j.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/test_chat-model-memory-neo4j.py new file mode 100644 index 00000000..a72602ea --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/test_chat-model-memory-neo4j.py @@ -0,0 +1,11 @@ +def test_chat_model_memory_neo4j(test_helpers, monkeypatch): + + output = test_helpers.run_module( + monkeypatch, + "chat-model-memory-neo4j", + ["Whats happening at Fistral?", "exit"] + ) + + # Test a response was received from the agent + # There is an output which looks like Session ID: #####\n[response from LLM]\n + assert len(output.split("\n")) == 2 \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/test_connect-to-neo4j.py 
b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/test_connect-to-neo4j.py new file mode 100644 index 00000000..30bf4abc --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/code/test_connect-to-neo4j.py @@ -0,0 +1,3 @@ +def test_connect_to_neo4j(test_helpers, monkeypatch): + + assert test_helpers.run_module(monkeypatch, "connect-to-neo4j") > "" \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/lesson.adoc b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/lesson.adoc index 3e62cace..daa30b23 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/lesson.adoc +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/3.7-persist-memory/lesson.adoc @@ -4,7 +4,7 @@ :disable-cache: true -Langchain includes functionality to integrate directly with Neo4j, including allowing you to run Cypher statements, query vector indexes and use Neo4j as a conversation memory store. +LangChain includes functionality to integrate directly with Neo4j, including allowing you to run Cypher statements, query vector indexes and use Neo4j as a conversation memory store. In this lesson, you will learn how to connect to and use a Neo4j database as a conversation memory store. @@ -21,20 +21,24 @@ include::code/connect-to-neo4j.py[] You can connect to the Neo4j sandbox created for you when you joined the course. -Update the code above to use the `url`, `username` and `password` of your Neo4j sandbox. +[IMPORTANT] +.Neo4j Connection Details +==== +Create environment variables for your Neo4j connection. 
-Connection URL:: [copy]#bolt://{sandbox-ip}:{sandbox-boltPort}# -Username:: [copy]#{sandbox-username}# -Password:: [copy]#{sandbox-password}# +NEO4J_URI:: [copy]#bolt://{sandbox-ip}:{sandbox-boltPort}# +NEO4J_USERNAME:: [copy]#{sandbox-username}# +NEO4J_PASSWORD:: [copy]#{sandbox-password}# +==== Run the query - you should see data about the movie Toy Story. The `Neo4jGraph` class is a wrapper to the link:https://neo4j.com/docs/python-manual/current/[Neo4j Python driver^]. -It simplifies connecting to Neo4j and integrating with the Langchain framework. +It simplifies connecting to Neo4j and integrating with the LangChain framework. === Schema -When you connect to the Neo4j database, the object loads the database schema into memory - this enables Langchain to access the schema information without having to query the database. +When you connect to the Neo4j database, the object loads the database schema into memory - this enables LangChain to access the schema information without having to query the database. You can access the schema information using the `schema` property. 
diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/4-agents/code/chat-agent-trailer.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/4-agents/code/chat-agent-trailer.py index 370c8b57..2b1d14b4 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/4-agents/code/chat-agent-trailer.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/4-agents/code/chat-agent-trailer.py @@ -1,3 +1,4 @@ +import os from langchain_openai import ChatOpenAI from langchain.agents import AgentExecutor, create_react_agent from langchain.tools import Tool @@ -14,12 +15,14 @@ SESSION_ID = str(uuid4()) print(f"Session ID: {SESSION_ID}") -llm = ChatOpenAI(openai_api_key="sk-...") +llm = ChatOpenAI( + openai_api_key=os.getenv("OPENAI_API_KEY") +) graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein" + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) prompt = ChatPromptTemplate.from_messages( @@ -73,9 +76,8 @@ def call_trailer_search(input): history_messages_key="chat_history", ) -while True: - q = input("> ") - +while (q := input("> ")) != "exit": + response = chat_agent.invoke( { "input": q diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/4-agents/code/chat-agent.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/4-agents/code/chat-agent.py index 7a210c01..68af2629 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/4-agents/code/chat-agent.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/4-agents/code/chat-agent.py @@ -1,3 +1,4 @@ +import os from langchain_openai import ChatOpenAI from langchain.agents import AgentExecutor, create_react_agent from langchain.tools import Tool @@ -11,12 +12,14 @@ SESSION_ID = str(uuid4()) print(f"Session ID: {SESSION_ID}") 
-llm = ChatOpenAI(openai_api_key="sk-...") +llm = ChatOpenAI( + openai_api_key=os.getenv("OPENAI_API_KEY") +) graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein" + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) prompt = ChatPromptTemplate.from_messages( @@ -59,8 +62,7 @@ def get_memory(session_id): ) #end::chat_agent[] -while True: - q = input("> ") +while (q := input("> ")) != "exit": response = chat_agent.invoke( { diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/4-agents/code/test_agent.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/4-agents/code/test_agent.py new file mode 100644 index 00000000..fe9af691 --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/4-agents/code/test_agent.py @@ -0,0 +1,23 @@ +def test_chat_agent(test_helpers, monkeypatch): + + output = test_helpers.run_module( + monkeypatch, + "chat-agent", + ["Find a movie about the meaning of life", "exit"] + ) + + # Test a response was received from the agent + # There is a output which looks like Session ID: #####\n[response from LLM]\n + assert len(output.split("\n")) == 2 + +def test_chat_agent_trailer(test_helpers, monkeypatch): + + output = test_helpers.run_module( + monkeypatch, + "chat-agent-trailer", + ["Find the movie trailer for the Matrix.", "exit"] + ) + + # Test a response was received from the agent + # There is a output which looks like Session ID: #####\n[response from LLM]\n + assert len(output.split("\n")) >= 2 \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/create_index.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/create_index.py index 2dbf022b..603d0085 100644 --- 
a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/create_index.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/create_index.py @@ -1,3 +1,4 @@ +import os from langchain_openai import OpenAIEmbeddings from langchain_neo4j import Neo4jGraph, Neo4jVector from langchain.schema import Document @@ -12,13 +13,13 @@ # Service used to create the embeddings embedding_provider = OpenAIEmbeddings( - openai_api_key="sk-..." + openai_api_key=os.getenv("OPENAI_API_KEY") ) graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein" + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) new_vector = Neo4jVector.from_documents( diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/query_vector.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/query_vector.py index 96a47299..32d15194 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/query_vector.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/query_vector.py @@ -1,14 +1,15 @@ +import os from langchain_openai import OpenAIEmbeddings from langchain_neo4j import Neo4jGraph, Neo4jVector embedding_provider = OpenAIEmbeddings( - openai_api_key="sk-..." 
+ openai_api_key=os.getenv("OPENAI_API_KEY") ) graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein" + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) movie_plot_vector = Neo4jVector.from_existing_index( diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/retreiver_chain.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/retreiver_chain.py index 8dcc4686..49d58b92 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/retreiver_chain.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/retreiver_chain.py @@ -1,17 +1,22 @@ +import os from langchain.chains import RetrievalQA from langchain_openai import ChatOpenAI, OpenAIEmbeddings from langchain_neo4j import Neo4jGraph, Neo4jVector -OPENAI_API_KEY = "sk-..." 
+OPENAI_API_KEY = os.getenv("OPENAI_API_KEY") -llm = ChatOpenAI(openai_api_key=OPENAI_API_KEY) +llm = ChatOpenAI( + openai_api_key=OPENAI_API_KEY +) -embedding_provider = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY) +embedding_provider = OpenAIEmbeddings( + openai_api_key=OPENAI_API_KEY +) graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein" + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) movie_plot_vector = Neo4jVector.from_existing_index( diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/test_create_index.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/test_create_index.py new file mode 100644 index 00000000..395d1fcb --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/test_create_index.py @@ -0,0 +1,21 @@ +def test_create_index(test_helpers, monkeypatch): + import os + from langchain_neo4j import Neo4jGraph + + test_helpers.run_module( + monkeypatch, + "create_index" + ) + + graph = Neo4jGraph( + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") + ) + + result = test_helpers.run_cypher( + graph, + "SHOW VECTOR INDEXES WHERE name = 'myVectorIndex'" + ) + + assert len(result) == 1 diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/test_query_vector.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/test_query_vector.py new file mode 100644 index 00000000..35b7a37e --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/test_query_vector.py @@ -0,0 +1,20 @@ +def test_query_vector(test_helpers, monkeypatch): + import os + import pathlib + from langchain_neo4j import Neo4jGraph + + graph = Neo4jGraph( + 
url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") + ) + + test_helpers.run_cypher_file( + graph, + os.path.join(pathlib.Path(__file__).parent.resolve(), "..", "reset.cypher") + ) + + assert test_helpers.run_module( + monkeypatch, + "query_vector" + ) > "" \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/test_retreiver_chain.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/test_retreiver_chain.py new file mode 100644 index 00000000..c87f41e6 --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/6-retrievers/code/test_retreiver_chain.py @@ -0,0 +1,3 @@ +def test_retreiver_chain(test_helpers, monkeypatch): + + assert test_helpers.run_module(monkeypatch, "retreiver_chain") > "" \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/7-c-neo4j-vector-retriever/code/chat-agent-retriever.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/7-c-neo4j-vector-retriever/code/chat-agent-retriever.py index 16df6d3a..97a75a40 100644 --- a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/7-c-neo4j-vector-retriever/code/chat-agent-retriever.py +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/7-c-neo4j-vector-retriever/code/chat-agent-retriever.py @@ -1,3 +1,4 @@ +import os from langchain_openai import ChatOpenAI, OpenAIEmbeddings from langchain.chains import RetrievalQA from langchain.agents import AgentExecutor, create_react_agent @@ -13,16 +14,16 @@ SESSION_ID = str(uuid4()) print(f"Session ID: {SESSION_ID}") -OPENAI_API_KEY = "sk-..." 
+OPENAI_API_KEY = os.getenv("OPENAI_API_KEY") llm = ChatOpenAI(openai_api_key=OPENAI_API_KEY) embedding_provider = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY) graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein" + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) prompt = ChatPromptTemplate.from_messages( @@ -88,8 +89,7 @@ def call_trailer_search(input): history_messages_key="chat_history", ) -while True: - q = input("> ") +while (q := input("> ")) != "exit": response = chat_agent.invoke( { diff --git a/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/7-c-neo4j-vector-retriever/code/test_chat-agent-retriever.py b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/7-c-neo4j-vector-retriever/code/test_chat-agent-retriever.py new file mode 100644 index 00000000..a726a9a2 --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/3-intro-to-langchain/lessons/7-c-neo4j-vector-retriever/code/test_chat-agent-retriever.py @@ -0,0 +1,12 @@ +def test_chat_agent_retriever(test_helpers, monkeypatch): + + output = test_helpers.run_module( + monkeypatch, + "chat-agent-retriever", + [ + "Find a movie with a plot about a mission to the moon that goes wrong", + "exit" + ] + ) + + assert len(output.split("\n")) >= 2 \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/1-cypher-qa-chain/code/cypher-gen.py b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/1-cypher-qa-chain/code/cypher-gen.py index 953cf039..6ff9644a 100644 --- a/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/1-cypher-qa-chain/code/cypher-gen.py +++ b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/1-cypher-qa-chain/code/cypher-gen.py @@ -1,16 +1,17 @@ +import os from langchain_openai import ChatOpenAI from langchain_neo4j import 
GraphCypherQAChain, Neo4jGraph from langchain.prompts import PromptTemplate # tag::openai-neo4j[] llm = ChatOpenAI( - openai_api_key="sk-..." + openai_api_key=os.getenv("OPENAI_API_KEY") ) graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein", + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) # end::openai-neo4j[] @@ -42,5 +43,7 @@ # end::cypher-chain[] # tag::invoke[] -cypher_chain.invoke({"query": "What is the plot of the movie Toy Story?"}) +result = cypher_chain.invoke({"query": "What is the plot of the movie Toy Story?"}) + +print(result) # end::invoke[] \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/1-cypher-qa-chain/code/test_cypher-gen.py b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/1-cypher-qa-chain/code/test_cypher-gen.py new file mode 100644 index 00000000..392a6c96 --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/1-cypher-qa-chain/code/test_cypher-gen.py @@ -0,0 +1,17 @@ +def test_cypher_gen(test_helpers, monkeypatch): + + import ast + from neo4j.exceptions import CypherSyntaxError + + try: + output = test_helpers.run_module( + monkeypatch, + "cypher-gen" + ) + + # load result into a dictionary + cypher_result = ast.literal_eval(output.split("\n")[-1]) + assert cypher_result["result"] > "" + + except CypherSyntaxError as e: + assert True, f"LLM generated incorrect Cypher: {e}" \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/cypher-gen-control-response.py b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/cypher-gen-control-response.py index f2992844..d4403494 100644 --- 
a/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/cypher-gen-control-response.py +++ b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/cypher-gen-control-response.py @@ -1,15 +1,16 @@ +import os from langchain_openai import ChatOpenAI from langchain_neo4j import GraphCypherQAChain, Neo4jGraph from langchain.prompts import PromptTemplate llm = ChatOpenAI( - openai_api_key="sk-..." + openai_api_key=os.getenv("OPENAI_API_KEY") ) graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein", + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) CYPHER_GENERATION_TEMPLATE = """ @@ -42,4 +43,6 @@ allow_dangerous_requests=True ) -cypher_chain.invoke({"query": "What role did Tom Hanks play in Toy Story?"}) \ No newline at end of file +result = cypher_chain.invoke({"query": "What role did Tom Hanks play in Toy Story?"}) + +print(result) \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/cypher-gen-follow-schema.py b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/cypher-gen-follow-schema.py index 111e0d56..898fca31 100644 --- a/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/cypher-gen-follow-schema.py +++ b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/cypher-gen-follow-schema.py @@ -1,15 +1,16 @@ +import os from langchain_openai import ChatOpenAI from langchain_neo4j import GraphCypherQAChain, Neo4jGraph from langchain.prompts import PromptTemplate llm = ChatOpenAI( - openai_api_key="sk-..." 
+ openai_api_key=os.getenv("OPENAI_API_KEY") ) graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein", + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) # tag::template[] @@ -38,4 +39,6 @@ allow_dangerous_requests=True ) -cypher_chain.invoke({"query": "What is the plot of the movie Toy Story?"}) +result = cypher_chain.invoke({"query": "What is the plot of the movie Toy Story?"}) + +print(result) \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/cypher-gen-understand-data.py b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/cypher-gen-understand-data.py index 1cd17980..8bad20fd 100644 --- a/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/cypher-gen-understand-data.py +++ b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/cypher-gen-understand-data.py @@ -1,15 +1,16 @@ +import os from langchain_openai import ChatOpenAI from langchain_neo4j import GraphCypherQAChain, Neo4jGraph from langchain.prompts import PromptTemplate llm = ChatOpenAI( - openai_api_key="sk-..." 
+ openai_api_key=os.getenv("OPENAI_API_KEY") ) graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein", + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) # tag::template[] @@ -39,4 +40,6 @@ allow_dangerous_requests=True ) -cypher_chain.invoke({"query": "Who acted in The Matrix?"}) \ No newline at end of file +result = cypher_chain.invoke({"query": "Who acted in The Matrix?"}) + +print(result) \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/test_cypher_gen_instructions.py b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/test_cypher_gen_instructions.py new file mode 100644 index 00000000..fe04c0f7 --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/5-specific-instructions/code/test_cypher_gen_instructions.py @@ -0,0 +1,53 @@ +def test_cypher_gen_control_response(test_helpers, monkeypatch): + + import ast + from neo4j.exceptions import CypherSyntaxError + + try: + + output = test_helpers.run_module( + monkeypatch, + "cypher-gen-control-response" + ) + + cypher_result = ast.literal_eval(output.split("\n")[-1]) + assert cypher_result["result"] > "" + + except CypherSyntaxError as e: + assert True, f"LLM generated incorrect Cypher: {e}" + +def test_cypher_gen_follow_schema(test_helpers, monkeypatch): + + import ast + from neo4j.exceptions import CypherSyntaxError + + try: + + output = test_helpers.run_module( + monkeypatch, + "cypher-gen-follow-schema" + ) + + cypher_result = ast.literal_eval(output.split("\n")[-1]) + assert cypher_result["result"] > "" + + except CypherSyntaxError as e: + assert True, f"LLM generated incorrect Cypher: {e}" + +def test_cypher_gen_understand_data(test_helpers, monkeypatch): + + import ast + from neo4j.exceptions import CypherSyntaxError + + try: + + output = 
test_helpers.run_module( + monkeypatch, + "cypher-gen-understand-data" + ) + + cypher_result = ast.literal_eval(output.split("\n")[-1]) + assert cypher_result["result"] > "" + + except CypherSyntaxError as e: + assert True, f"LLM generated incorrect Cypher: {e}" \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/7-fewshot-examples/code/cypher-gen-few-shot.py b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/7-fewshot-examples/code/cypher-gen-few-shot.py index efd63acb..292bb2ae 100644 --- a/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/7-fewshot-examples/code/cypher-gen-few-shot.py +++ b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/7-fewshot-examples/code/cypher-gen-few-shot.py @@ -1,15 +1,16 @@ +import os from langchain_openai import ChatOpenAI from langchain_neo4j import GraphCypherQAChain, Neo4jGraph from langchain.prompts import PromptTemplate llm = ChatOpenAI( - openai_api_key="sk-..." 
+ openai_api_key=os.getenv("OPENAI_API_KEY") ) graph = Neo4jGraph( - url="bolt://localhost:7687", - username="neo4j", - password="pleaseletmein", + url=os.getenv("NEO4J_URI"), + username=os.getenv("NEO4J_USERNAME"), + password=os.getenv("NEO4J_PASSWORD") ) # tag::template[] @@ -49,4 +50,7 @@ verbose=True, allow_dangerous_requests=True ) -cypher_chain.invoke({"query": "What movies has Tom Hanks directed and what are the genres?"}) \ No newline at end of file + +result = cypher_chain.invoke({"query": "What movies has Tom Hanks directed and what are the genres?"}) + +print(result) \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/7-fewshot-examples/code/test_cypher_gen_few_shot.py b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/7-fewshot-examples/code/test_cypher_gen_few_shot.py new file mode 100644 index 00000000..96bdafa8 --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/modules/4-cypher-generation/lessons/7-fewshot-examples/code/test_cypher_gen_few_shot.py @@ -0,0 +1,17 @@ +def test_cypher_gen_few_shot(test_helpers, monkeypatch): + + import ast + from neo4j.exceptions import CypherSyntaxError + + try: + + output = test_helpers.run_module( + monkeypatch, + "cypher-gen-few-shot" + ) + + cypher_result = ast.literal_eval(output.split("\n")[-1]) + assert cypher_result["result"] > "" + + except CypherSyntaxError as e: + assert True, f"LLM generated incorrect Cypher: {e}" \ No newline at end of file diff --git a/asciidoc/courses/llm-fundamentals/requirements.txt b/asciidoc/courses/llm-fundamentals/requirements.txt new file mode 100644 index 00000000..702aeff3 --- /dev/null +++ b/asciidoc/courses/llm-fundamentals/requirements.txt @@ -0,0 +1,7 @@ +langchain +langchain-openai +langchain-neo4j +langchain-community +neo4j +python-dotenv +youtube-search \ No newline at end of file diff --git a/docs/testing-python.adoc b/docs/testing-python.adoc new file mode 100644 index 00000000..95d8825b 
--- /dev/null +++ b/docs/testing-python.adoc @@ -0,0 +1,27 @@ += Testing Python code included in course folders + +Set up a virtual environment: + +[source,sh] +---- +python -m venv .venv +source .venv/bin/activate +pip install pytest +---- + +Install the dependencies tied to the course: + +[source,sh] +---- +cd asciidoc/courses/llm-fundamentals +pip install -r requirements.txt +---- + +Create `.env` file in the course folder based on the `.env.example`. + +Run the tests: + +[source,sh] +---- +pytest -v +----