Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Test GenAI Courses #287

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,5 @@ courses.zip
html.zip
build/html/*
tmp
.venv/
__pycache__/

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langchain.agents import AgentExecutor, create_react_agent
from langchain.tools import Tool
from langchain import hub
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain.schema import StrOutputParser
from langchain_neo4j.chat_message_histories.neo4j import Neo4jChatMessageHistory
from langchain_neo4j import Neo4jGraph
from uuid import uuid4

# Load environment variables
load_dotenv()

def create_cypher_agent(session_id=None):
    """Build a conversational ReAct agent that can discuss Cypher queries.

    Args:
        session_id: Identifier used to key the Neo4j-backed chat history.
            When omitted, a fresh UUID4 string is generated.

    Returns:
        A ``RunnableWithMessageHistory`` wrapping an ``AgentExecutor`` whose
        per-session memory is stored in Neo4j.
    """
    if session_id is None:
        session_id = str(uuid4())

    chat_llm = ChatOpenAI(api_key=os.getenv("OPENAI_API_KEY"))

    # Connection settings fall back to local defaults; the password must
    # always come from the environment.
    neo4j_graph = Neo4jGraph(
        url=os.getenv("NEO4J_URI", "bolt://localhost:7687"),
        username=os.getenv("NEO4J_USERNAME", "neo4j"),
        password=os.getenv("NEO4J_PASSWORD"),
    )

    # A small chain that answers Cypher questions; exposed to the agent as a tool.
    cypher_prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "You are a Neo4j expert having a conversation about how to create Cypher queries",
            ),
            ("human", "{input}"),
        ]
    )
    cypher_chat = cypher_prompt | chat_llm | StrOutputParser()

    def get_memory(session_id):
        # One chat history per session, persisted in the Neo4j graph.
        return Neo4jChatMessageHistory(session_id=session_id, graph=neo4j_graph)

    cypher_tool = Tool.from_function(
        name="Cypher Support",
        description="For when you need to talk about Cypher queries.",
        func=cypher_chat.invoke,
    )
    tools = [cypher_tool]

    # Standard ReAct chat prompt from the LangChain hub drives the agent loop.
    react_prompt = hub.pull("hwchase17/react-chat")
    executor = AgentExecutor(
        agent=create_react_agent(chat_llm, tools, react_prompt),
        tools=tools,
    )

    return RunnableWithMessageHistory(
        executor,
        get_memory,
        input_messages_key="input",
        history_messages_key="chat_history",
    )

def main():
    """Run an interactive REPL against the Cypher agent.

    Creates one session (printed so the Neo4j history can be inspected),
    then loops reading a question from stdin and printing the agent's
    answer until the user interrupts or stdin is exhausted.
    """
    session_id = str(uuid4())
    print(f"Session ID: {session_id}")

    cypher_agent = create_cypher_agent(session_id)

    while True:
        try:
            q = input("> ")
            response = cypher_agent.invoke(
                {"input": q},
                {"configurable": {"session_id": session_id}},
            )
            print(response["output"])
        # EOFError: input() raises this when stdin closes (e.g. piped input
        # ends or Ctrl-D) — previously this crashed with a traceback.
        except (KeyboardInterrupt, EOFError):
            print("\nGoodbye!")
            break

if __name__ == "__main__":
    main()
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import pytest
from example_application import create_cypher_agent
import os

@pytest.fixture(autouse=True)
def setup():
    """Skip each test unless the credentials the agent needs are present."""
    unset = [
        name
        for name in ("OPENAI_API_KEY", "NEO4J_PASSWORD")
        if not os.getenv(name)
    ]
    if unset:
        pytest.skip(f"Missing required environment variables: {', '.join(unset)}")

def test_cypher_agent_creation():
    """The factory returns a non-None agent for an explicit session id."""
    assert create_cypher_agent("test-session") is not None

def test_cypher_agent_basic_query():
    """A simple question yields a response dict with a string 'output'."""
    agent = create_cypher_agent("test-session")
    session_config = {"configurable": {"session_id": "test-session"}}
    result = agent.invoke(
        {"input": "How do I create a simple node in Neo4j?"},
        session_config,
    )
    assert result is not None
    assert "output" in result
    assert isinstance(result["output"], str)
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ After completing this module, you will understand what this program does and how

[source, python]
----
include::code/example_applcation.py[][]
include::code/example_application.py[][]
----
====

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
# Unused
from langchain_openai import OpenAI

llm = OpenAI(openai_api_key="sk-...")
import os

OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
llm = OpenAI(openai_api_key=OPENAI_API_KEY)

response = llm.invoke("What is Neo4j?")

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,11 @@
# Unused
from langchain_openai import OpenAI
from langchain.prompts import PromptTemplate

llm = OpenAI(openai_api_key="sk-...")
import os

OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
llm = OpenAI(openai_api_key=OPENAI_API_KEY)

template = PromptTemplate(template="""
You are a cockney fruit and vegetable seller.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,11 @@ pip install langchain
You may find it helpful to create a Python virtual environment using a tool like link:https://virtualenv.pypa.io/en/latest/[virtualenv^]. Using a virtual environment allows you to install packages without affecting your system Python installation.
====

During the course, you will also be using components from the `neo4j`, `langchain-community` and `langchainhub` packages:
During the course, you will also be using components from the `langchain-neo4j`, `langchain-community` and `langchainhub` packages:

[source,sh]
.Install Langchain
pip install langchain-community langchainhub neo4j
pip install langchain-neo4j langchain-community langchainhub neo4j

=== Installing OpenAI

Expand All @@ -54,7 +54,7 @@ You can install the `openai` and `langchain-openai` Python packages using `pip`:
.Install OpenAI SDK
pip install openai langchain-openai

== Create a Langchain application
== Create a LangChain application

Create a new Python program and copy this code into a new Python file.

Expand Down
Original file line number Diff line number Diff line change
@@ -1,22 +1,36 @@
from langchain_openai import OpenAI
from langchain.prompts import PromptTemplate
import os

llm = OpenAI(openai_api_key="sk-...")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

template = PromptTemplate.from_template("""
You are a cockney fruit and vegetable seller.
Your role is to assist your customer with their fruit and vegetable needs.
Respond using cockney rhyming slang.
# tag::llm[]
llm = OpenAI(openai_api_key=OPENAI_API_KEY)
# end::llm[]

Tell me about the following fruit: {fruit}
""")
def create_fruit_chain():
# tag::code[]
template = PromptTemplate.from_template("""
You are a cockney fruit and vegetable seller.
Your role is to assist your customer with their fruit and vegetable needs.
Respond using cockney rhyming slang.

# tag::llm_chain[]
llm_chain = template | llm
# end::llm_chain[]
Tell me about the following fruit: {fruit}
""")

# tag::llm_chain[]
llm_chain = template | llm
# end::llm_chain[]

return llm_chain


llm_chain = create_fruit_chain()

# tag::invoke[]
response = llm_chain.invoke({"fruit": "apple"})
# end::invoke[]

print(response)
print(response)

# end::code[]
Original file line number Diff line number Diff line change
@@ -1,11 +1,19 @@
from langchain_openai import OpenAI
from langchain.prompts import PromptTemplate

import os
from dotenv import load_dotenv

load_dotenv()

# tag::import[]
from langchain.schema import StrOutputParser
# end::import[]


llm = OpenAI(
openai_api_key="sk-...")
openai_api_key=os.getenv("OPENAI_API_KEY"))


template = PromptTemplate.from_template("""
You are a cockney fruit and vegetable seller.
Expand Down
Original file line number Diff line number Diff line change
@@ -1,26 +1,30 @@
import os
from dotenv import load_dotenv
from langchain_openai import OpenAI
from langchain.prompts import PromptTemplate
# tag::import[]
from langchain.output_parsers.json import SimpleJsonOutputParser
# end::import[]

llm = OpenAI(
openai_api_key="sk-...")
def create_fruit_chain():
load_dotenv()

llm = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

template = PromptTemplate.from_template("""
You are a cockney fruit and vegetable seller.
Your role is to assist your customer with their fruit and vegetable needs.
Respond using cockney rhyming slang.
template = PromptTemplate.from_template("""
You are a cockney fruit and vegetable seller.
Your role is to assist your customer with their fruit and vegetable needs.
Respond using cockney rhyming slang.

Output JSON as {{"description": "your response here"}}
Output JSON as {{"description": "your response here"}}

Tell me about the following fruit: {fruit}
""")
Tell me about the following fruit: {fruit}
""")

# tag::llm_chain[]
llm_chain = template | llm | SimpleJsonOutputParser()
# end::llm_chain[]
return template | llm | SimpleJsonOutputParser()

response = llm_chain.invoke({"fruit": "apple"})
def main():
llm_chain = create_fruit_chain()
response = llm_chain.invoke({"fruit": "apple"})
print(response)

print(response)
if __name__ == "__main__":
main()
Loading