Add support for characters
oobabooga committed Jan 19, 2023
1 parent 3121f47 commit 8d78887
Showing 4 changed files with 84 additions and 40 deletions.
8 changes: 8 additions & 0 deletions characters/Example.json
@@ -0,0 +1,8 @@
{
"char_name": "Chiharu Yamada",
"char_persona": "Chiharu Yamada is a young Japanese woman in her early twenties. She has shoulder length black hair, dark brown eyes, and a petite figure. She is a computer engineer and loves taking apart and fixing computers. She is a bit of a nerd, but is still quite popular among her peers. She is always eager to learn new things and loves helping others with their computer problems. Chiharu is friendly, funny, and easygoing, with a knack for problem solving. She has a passion for technology and can often be found tinkering with her laptop or tinkering with some computer hardware. She has an infectious enthusiasm for life and loves to share her knowledge and experiences with others.",
"char_greeting": "*Chiharu strides into the room with a smile, her eyes lighting up when she sees you. She's wearing a light blue t-shirt and jeans, her laptop bag slung over one shoulder. She takes a seat next to you, her enthusiasm palpable in the air*\nHey! I'm so excited to finally meet you. I've heard so many great things about you and I'm eager to pick your brain about computers. I'm sure you have a wealth of knowledge that I can learn from. *She grins, eyes twinkling with excitement* Let's get started!",
"world_scenario": "",
"example_dialogue": "<START>\nYou: So how did you get into computer engineering?\nChiharu Yamada: *She smiles, her eyes lighting up as she begins to talk about her passion* I've always been interested in technology ever since I was a kid. I remember tinkering around with my dad's old computer when I was about 8 or 9, and I just fell in love with it. I kept learning more and more, and eventually I was able to start fixing my own machines.\n<START>\nYou: That's really impressive!\nChiharu Yamada: *She chuckles bashfully* Thanks! I'm still learning a lot, but I'm having a lot of fun with it. I'm always eager to try out new things and challenge myself.\n<START>\nYou: So what do you do when you're not working on computers?\nChiharu Yamada: *She grins, her eyes sparkling with energy* Oh, lots of things! I love to explore new places, go out with friends, watch movies, and play video games. I'm always trying to stay active and have some fun.\n<START>\nYou: What's your favorite type of computer hardware to work with?\nChiharu Yamada: *The woman leans forward, her enthusiasm obvious* Definitely motherboards. They're like a puzzle, and I love trying to figure out how they all fit together. Plus, they're the backbone of any computer system, so it's really satisfying when I can get them working properly.\n<START>\nYou: That sounds great!\nChiharu Yamada: *She nods, her smile widening* Yeah, it's really fun. I'm lucky to be able to do this as a job. I get to work with something I'm passionate about, and I get to help people with their computer problems. It's a win-win!\n"
}
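For reference, these fields feed the prompt that the new load_character function in server.py (further down in this diff) assembles. A minimal standalone sketch of that assembly, assuming the file lives at characters/Example.json; the build_context name is illustrative only:

import json
from pathlib import Path

def build_context(character):
    # Sketch only: mirrors the load_character logic added in server.py below.
    with open(Path(f"characters/{character}.json"), "r") as f:
        data = json.load(f)
    context = ""
    if data.get("char_persona"):
        context += f"{data['char_name']}'s Persona: {data['char_persona']}\n"
    if data.get("world_scenario"):
        context += f"Scenario: {data['world_scenario']}\n"
    if data.get("example_dialogue"):
        context += data["example_dialogue"]
    context = f"{context.strip()}\n<START>"
    # The greeting, if present, becomes the first bot message in the history.
    history = [["", data["char_greeting"]]] if "char_greeting" in data else []
    return data["char_name"], context, history

name2, context, history = build_context("Example")

For Example.json this yields a context beginning with "Chiharu Yamada's Persona: ...", followed by the example dialogue and a trailing <START> tag, with the greeting as the opening bot message.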

Binary file added characters/Example.png
55 changes: 30 additions & 25 deletions html_generator.py
@@ -160,7 +160,7 @@ def generate_4chan_html(f):

return output

def generate_chat_html(history, name1, name2):
def generate_chat_html(history, name1, name2, character):
css = """
.chat {
margin-left: auto;
@@ -219,17 +219,21 @@ def generate_chat_html(history, name1, name2):

output = ''
output += f'<style>{css}</style><div class="chat" id="chat">'
if Path("profile.png").exists():
img = '<img src="file/profile.png">'
elif Path("profile.jpg").exists():
img = '<img src="file/profile.jpg">'
elif Path("profile.jpeg").exists():
img = '<img src="file/profile.jpeg">'
else:
img = ''
img = ''
for i in [
f"characters/{character}.png",
f"characters/{character}.jpg",
f"characters/{character}.jpeg",
"profile.png",
"profile.jpg",
"profile.jpeg",
]:

if Path(i).exists():
img = f'<img src="file/{i}">'
break

for row in history[::-1]:
row = list(row)
for i,row in enumerate(history[::-1]):
row[0] = re.sub(r"[\\]*\*", r"*", row[0])
row[1] = re.sub(r"[\\]*\*", r"*", row[1])
row[0] = re.sub(r"(\*)([^\*]*)(\*)", r"<em>\2</em>", row[0])
@@ -251,21 +251,22 @@ def generate_chat_html(history, name1, name2):
</div>
"""

p = '\n'.join([f"<p>{x}</p>" for x in row[0].split('\n')])
output += f"""
<div class="message">
<div class="circle-you">
</div>
<div class="text">
<div class="username">
{name1}
if not (i == len(history)-1 and len(row[0]) == 0):
p = '\n'.join([f"<p>{x}</p>" for x in row[0].split('\n')])
output += f"""
<div class="message">
<div class="circle-you">
</div>
<div class="text">
<div class="username">
{name1}
</div>
<div class="body">
{p}
</div>
</div>
</div>
<div class="body">
{p}
</div>
</div>
</div>
"""
"""

output += "</div>"
return output
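With the new character argument, the avatar lookup prefers an image named after the character before falling back to the old profile.* files. A hedged usage sketch (history rows are [user_text, bot_text] pairs; paths resolve relative to the working directory):

from html_generator import generate_chat_html

history = [["So how did you get into computer engineering?",
            "*She smiles* I've always been interested in technology."]]
# With character="Example", the avatar resolves to characters/Example.png
# (then .jpg/.jpeg), falling back to profile.png/.jpg/.jpeg if none exist.
html = generate_chat_html(history, "You", "Chiharu Yamada", "Example")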
61 changes: 46 additions & 15 deletions server.py
@@ -35,6 +35,7 @@
loaded_preset = None
available_models = sorted(set([item.replace('.pt', '') for item in map(lambda x : str(x.name), list(Path('models/').glob('*'))+list(Path('torch-dumps/').glob('*'))) if not item.endswith('.txt')]), key=str.lower)
available_presets = sorted(set(map(lambda x : str(x.name).split('.')[0], Path('presets').glob('*.txt'))), key=str.lower)
available_characters = sorted(set(map(lambda x : str(x.name).split('.')[0], Path('characters').glob('*.json'))), key=str.lower)

settings = {
'max_new_tokens': 200,
@@ -50,6 +51,7 @@
'prompt': 'Common sense questions and answers\n\nQuestion: \nFactual answer:',
'prompt_gpt4chan': '-----\n--- 865467536\nInput text\n--- 865467537\n',
'stop_at_newline': True,
'stop_at_newline_pygmalion': False,
}

if args.settings is not None and Path(args.settings).exists():
@@ -217,6 +219,7 @@ def generate_reply(question, tokens, inference_settings, selected_model, eos_tok
css = ".my-4 {margin-top: 0} .py-6 {padding-top: 2.5rem}"
if args.chat or args.cai_chat:
history = []
character = None

# This gets the new line characters right.
def clean_chat_message(text):
@@ -284,12 +287,12 @@ def chatbot_wrapper(text, tokens, inference_settings, selected_model, name1, nam

def cai_chatbot_wrapper(text, tokens, inference_settings, selected_model, name1, name2, context, check):
for history in chatbot_wrapper(text, tokens, inference_settings, selected_model, name1, name2, context, check):
yield generate_chat_html(history, name1, name2)
yield generate_chat_html(history, name1, name2, character)

def remove_last_message(name1, name2):
history.pop()
if args.cai_chat:
return generate_chat_html(history, name1, name2)
return generate_chat_html(history, name1, name2, character)
else:
return history

@@ -298,11 +301,11 @@ def clear():
history = []

def clear_html():
return generate_chat_html([], "", "")
return generate_chat_html([], "", "", character)

def redraw_html(name1, name2):
global history
return generate_chat_html(history, name1, name2)
return generate_chat_html(history, name1, name2, character)

def save_history():
if not Path('logs').exists():
@@ -315,18 +318,43 @@ def load_history(file):
global history
history = json.loads(file.decode('utf-8'))['data']

if 'pygmalion' in model_name.lower():
context_str = settings['context_pygmalion']
name1_str = settings['name1_pygmalion']
name2_str = settings['name2_pygmalion']
else:
context_str = settings['context']
name1_str = settings['name1']
name2_str = settings['name2']
def load_character(_character, name1, name2):
global history, character
context = ""
history = []
if _character != 'None':
character = _character
with open(Path(f'characters/{_character}.json'), 'r') as f:
data = json.loads(f.read())
name2 = data['char_name']
if 'char_persona' in data and data['char_persona'] != '':
context += f"{data['char_name']}'s Persona: {data['char_persona']}\n"
if 'world_scenario' in data and data['world_scenario'] != '':
context += f"Scenario: {data['world_scenario']}\n"
if 'example_dialogue' in data and data['example_dialogue'] != '':
context += f"{data['example_dialogue']}"
context = f"{context.strip()}\n<START>"
if 'char_greeting' in data:
history = [['', data['char_greeting']]]
else:
character = None
context = settings['context_pygmalion']
name2 = settings['name2_pygmalion']

if args.cai_chat:
return name2, context, generate_chat_html(history, name1, name2, character)
else:
return name2, context, history

suffix = '_pygmalion' if 'pygmalion' in model_name.lower() else ''
context_str = settings[f'context{suffix}']
name1_str = settings[f'name1{suffix}']
name2_str = settings[f'name2{suffix}']
stop_at_newline = settings[f'stop_at_newline{suffix}']

with gr.Blocks(css=css+".h-\[40vh\] {height: 66.67vh} .gradio-container {max-width: 800px; margin-left: auto; margin-right: auto}", analytics_enabled=False) as interface:
if args.cai_chat:
display1 = gr.HTML(value=generate_chat_html([], "", ""))
display1 = gr.HTML(value=generate_chat_html([], "", "", character))
else:
display1 = gr.Chatbot()
textbox = gr.Textbox(lines=2, label='Input')
@@ -347,7 +375,9 @@ def load_history(file):
name2 = gr.Textbox(value=name2_str, lines=1, label='Bot\'s name')
context = gr.Textbox(value=context_str, lines=2, label='Context')
with gr.Row():
check = gr.Checkbox(value=settings['stop_at_newline'], label='Stop generating at new line character?')
character_menu = gr.Dropdown(choices=["None"]+available_characters, value="None", label='Character')
with gr.Row():
check = gr.Checkbox(value=stop_at_newline, label='Stop generating at new line character?')
with gr.Row():
with gr.Column():
gr.Markdown("Upload chat history")
@@ -371,9 +401,10 @@ def load_history(file):
btn.click(lambda x: "", textbox, textbox, show_progress=False)
textbox.submit(lambda x: "", textbox, textbox, show_progress=False)
stop.click(None, None, None, cancels=[gen_event, gen_event2])

save_btn.click(save_history, inputs=[], outputs=[download])
upload.upload(load_history, [upload], [])
character_menu.change(load_character, [character_menu, name1, name2], [name2, context, display1])

if args.cai_chat:
upload.upload(redraw_html, [name1, name2], [display1])
else:
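In the UI, the new Character dropdown is wired to load_character through a change event, so picking a character updates the bot's name, the context box, and (in --cai-chat mode) the rendered chat HTML. A rough, self-contained sketch of just that wiring, with a stub standing in for the real load_character defined above:

import gradio as gr

def load_character(_character, name1, name2):
    # Stub for illustration; the real function also rebuilds the global history.
    if _character == "None":
        return name2, "", ""
    return "Chiharu Yamada", "Chiharu Yamada's Persona: ...\n<START>", "<div>chat</div>"

with gr.Blocks() as demo:
    display1 = gr.HTML()
    name1 = gr.Textbox(value="You", label="Your name")
    name2 = gr.Textbox(value="Bot", label="Bot's name")
    context = gr.Textbox(lines=2, label="Context")
    character_menu = gr.Dropdown(choices=["None", "Example"], value="None", label="Character")
    character_menu.change(load_character, [character_menu, name1, name2], [name2, context, display1])

demo.launch()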
