diff --git a/cookbooks/Gradio/README.md b/cookbooks/Gradio/README.md
new file mode 100644
index 000000000..6ae538709
--- /dev/null
+++ b/cookbooks/Gradio/README.md
@@ -0,0 +1,59 @@
+# Gradio Workbook Editor
+
+## Prompt IDE with LastMile AI
+
+This cookbook demonstrates the capabilities of our AI Workbook editor. It can run inference against locally hosted or remote models from many inference providers, including Hugging Face, OpenAI, and others.
+
+It supports text, image, and audio models, allowing you to easily chain them together in a single notebook session!
+
+With `aiconfig`, the editor saves its state in a single JSON config file which you can share with others. In addition to editing the `FILE_NAME.aiconfig.json` file through our editor interface, you can also use the AIConfig SDK to interact with it in application code, providing a single interface to run inference across any model and modality (media format).
+
+## Tech Stack
+
+What you see here is a "local editor" -- a React frontend and a Flask server which allow you to edit `.aiconfig.json` files in a notebook-like UI.
+
+- Frontend code:
+
+### Gradio custom component
+
+The Gradio custom component is currently WIP.
+
+**Note**: We already have the Gradio backend that corresponds to the Flask server in the [`gradio-workbook`](https://github.com/lastmile-ai/gradio-workbook) repo.
+
+We are working on using `sveltris` to package our React frontend to work with Gradio. Once that works, the same experience you see in this cookbook will be possible inside a Gradio custom component.
+
+## Getting Started
+
+**Instructions**:
+
+- Clone https://github.com/lastmile-ai/aiconfig
+- Go back to the top-level directory: `cd `
+
+- Set up an alias for the `aiconfig` command:
+
+```bash
+alias aiconfig="python -m 'aiconfig.scripts.aiconfig_cli'"
+```
+
+- `cd /cookbooks/Gradio`
+
+- `pip3 install -r requirements.txt`
+
+- Install the `python-aiconfig-test` package from `test-pypi`:
+
+```bash
+pip3 install --index-url https://test.pypi.org/simple --extra-index-url https://pypi.org/simple python-aiconfig-test==1.1.25 --force
+```
+
+Now run this command to start the AIConfig editor:
+
+```bash
+aiconfig edit --aiconfig-path=huggingface.aiconfig.json --parsers-module-path=hf_model_parsers.py
+```
+
+## TODO
+
+- Publish a new version of the `aiconfig_extension_hugging_face` package
+- Update `huggingface.aiconfig.json` with clean examples
+- Add a video demo
diff --git a/cookbooks/Gradio/hf_model_parsers.py b/cookbooks/Gradio/hf_model_parsers.py
new file mode 100644
index 000000000..731c536c4
--- /dev/null
+++ b/cookbooks/Gradio/hf_model_parsers.py
@@ -0,0 +1,33 @@
+from aiconfig_extension_hugging_face import (
+    HuggingFaceAutomaticSpeechRecognitionTransformer,
+    HuggingFaceImage2TextTransformer,
+    HuggingFaceTextSummarizationTransformer,
+    HuggingFaceText2ImageDiffusor,
+    HuggingFaceText2SpeechTransformer,
+    HuggingFaceTextGenerationTransformer,
+    HuggingFaceTextTranslationTransformer,
+)
+from aiconfig import AIConfigRuntime
+
+
+def register_model_parsers() -> None:
+    """Register model parsers for Hugging Face models."""
+    # Audio --> Text
+    AIConfigRuntime.register_model_parser(HuggingFaceAutomaticSpeechRecognitionTransformer(), "AutomaticSpeechRecognition")
+
+    # Image --> Text
+    AIConfigRuntime.register_model_parser(HuggingFaceImage2TextTransformer(), "Image2Text")
+
+    # Text --> Image
+    AIConfigRuntime.register_model_parser(HuggingFaceText2ImageDiffusor(), "Text2Image")
+
+    # Text --> Audio
+    AIConfigRuntime.register_model_parser(HuggingFaceText2SpeechTransformer(), "Text2Speech")
+
+    # Text --> Text
+    AIConfigRuntime.register_model_parser(HuggingFaceTextGenerationTransformer(), "TextGeneration")
+    AIConfigRuntime.register_model_parser(HuggingFaceTextSummarizationTransformer(), "TextSummarization")
+    AIConfigRuntime.register_model_parser(HuggingFaceTextTranslationTransformer(), "Translation")
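For readers who want the programmatic counterpart of the `aiconfig edit` command in the README, here is a minimal sketch that registers the parsers above and runs one prompt from `huggingface.aiconfig.json` (added below). The `load`/`run`/`get_output_text` calls follow the AIConfig SDK's documented usage, but treat this as an illustration and verify it against the package version you install.

```python
# Minimal sketch: drive this cookbook's config from application code instead
# of the editor UI. Assumes python-aiconfig and aiconfig-extension-hugging-face
# are installed; the first run may download the underlying Hugging Face models.
import asyncio

from aiconfig import AIConfigRuntime
from hf_model_parsers import register_model_parsers


async def main() -> None:
    # Register the same Hugging Face model parsers the editor uses.
    register_model_parsers()

    # Load the config that `aiconfig edit` operates on.
    config = AIConfigRuntime.load("huggingface.aiconfig.json")

    # Run one prompt from the config (text summarization via facebook/bart-large-cnn).
    await config.run("summarize_story")
    print(config.get_output_text("summarize_story"))


if __name__ == "__main__":
    asyncio.run(main())
```

Run it from `cookbooks/Gradio` so the relative file paths resolve.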
+ """ + # Audio --> Text + AIConfigRuntime.register_model_parser(HuggingFaceAutomaticSpeechRecognitionTransformer(), "AutomaticSpeechRecognition") + + # Image --> Text + AIConfigRuntime.register_model_parser(HuggingFaceImage2TextTransformer(), "Image2Text") + + # Text --> Image + AIConfigRuntime.register_model_parser(HuggingFaceText2ImageDiffusor(), "Text2Image") + + # Text --> Audio + AIConfigRuntime.register_model_parser(HuggingFaceText2SpeechTransformer(), "Text2Speech") + + # Text --> Text + AIConfigRuntime.register_model_parser(HuggingFaceTextGenerationTransformer(), "TextGeneration") + AIConfigRuntime.register_model_parser(HuggingFaceTextSummarizationTransformer(), "TextSummarization") + AIConfigRuntime.register_model_parser(HuggingFaceTextTranslationTransformer(), "Translation") + diff --git a/cookbooks/Gradio/hi.mp3 b/cookbooks/Gradio/hi.mp3 new file mode 100644 index 000000000..a5c9a0452 Binary files /dev/null and b/cookbooks/Gradio/hi.mp3 differ diff --git a/cookbooks/Gradio/huggingface.aiconfig.json b/cookbooks/Gradio/huggingface.aiconfig.json new file mode 100644 index 000000000..7f82ea678 --- /dev/null +++ b/cookbooks/Gradio/huggingface.aiconfig.json @@ -0,0 +1,120 @@ +{ + "name": "The Tale of the Quick Brown Fox", + "schema_version": "latest", + "metadata": { + "parameters": {}, + "models": { + "TextGeneration": { + "model": "stevhliu/my_awesome_billsum_model", + "min_length": 10, + "max_length": 30 + }, + "ImageToText": { + "model": "Salesforce/blip-image-captioning-base" + }, + "Text2Speech": { + "model": "suno/bark" + }, + "TextSummarization": { + "model": "facebook/bart-large-cnn" + }, + "TextTranslation": { + "model": "translation_en_to_fr" + } + }, + "default_model": "TextGeneration", + "model_parsers": { + "Image2Text": "HuggingFaceImage2TextTransformer", + "Salesforce/blip-image-captioning-base": "HuggingFaceImage2TextTransformer", + "Text2Speech": "HuggingFaceText2SpeechTransformer", + "suno/bark": "HuggingFaceText2SpeechTransformer", + "TextSummarization": "HuggingFaceTextSummarizationTransformer", + "facebook/bart-large-cnn": "HuggingFaceTextSummarizationTransformer", + "TextTranslation": "HuggingFaceTextTranslationTransformer", + "translation_en_to_fr": "HuggingFaceTextTranslationTransformer" + } + }, + "description": "The Tale of the Quick Brown Fox", + "prompts": [ + { + "name": "translate_instruction", + "input": "Tell the tale of {{topic}}", + "metadata": { + "model": { + "name": "Translation", + "settings": { + "model": "translation_en_to_fr" + } + }, + "parameters": { + "topic": "the quick brown fox" + } + }, + "outputs": [] + }, + { + "name": "summarize_story", + "input": "Once upon a time, in a lush and vibrant forest, there lived a magnificent creature known as the Quick Brown Fox. This fox was unlike any other, possessing incredible speed and agility that awed all the animals in the forest. With its fur as golden as the sun and its eyes as sharp as emeralds, the Quick Brown Fox was admired by everyone, from the tiniest hummingbird to the mightiest bear. The fox had a kind heart and would often lend a helping paw to those in need. The Quick Brown Fox had a particular fondness for games and challenges. It loved to test its skills against others, always seeking new adventures to satisfy its boundless curiosity. 
diff --git a/cookbooks/Gradio/requirements.txt b/cookbooks/Gradio/requirements.txt
new file mode 100644
index 000000000..3335e3ba5
--- /dev/null
+++ b/cookbooks/Gradio/requirements.txt
@@ -0,0 +1,5 @@
+# AIConfig
+python-aiconfig
+
+# Hugging Face Extension for AIConfig
+aiconfig-extension-hugging-face
\ No newline at end of file
diff --git a/cookbooks/Gradio/travel.aiconfig.json b/cookbooks/Gradio/travel.aiconfig.json
new file mode 100644
index 000000000..3cc596a1a
--- /dev/null
+++ b/cookbooks/Gradio/travel.aiconfig.json
@@ -0,0 +1,40 @@
+{
+  "name": "NYC Trip Planner",
+  "schema_version": "latest",
+  "metadata": {
+    "parameters": {
+      "": ""
+    },
+    "models": {
+      "gpt-3.5-turbo": {
+        "model": "gpt-3.5-turbo",
+        "top_p": 1,
+        "temperature": 1
+      },
+      "gpt-4": {
+        "model": "gpt-4",
+        "max_tokens": 3000,
+        "system_prompt": "You are an expert travel coordinator with exquisite taste."
+      }
+    },
+    "default_model": "gpt-3.5-turbo"
+  },
+  "description": "Intrepid explorer with ChatGPT and AIConfig",
+  "prompts": [
+    {
+      "name": "get_activities",
+      "input": "Tell me 10 fun attractions to do in NYC."
+    },
+    {
+      "name": "gen_itinerary",
+      "input": "Generate an itinerary ordered by {{order_by}} for these activities: {{get_activities.output}}.",
+      "metadata": {
+        "model": "gpt-4",
+        "parameters": {
+          "order_by": "geographic location"
+        }
+      }
+    }
+  ],
+  "$schema": "https://json.schemastore.org/aiconfig-1.0"
+}
\ No newline at end of file
diff --git a/extensions/HuggingFace/python/huggingface.aiconfig.json b/extensions/HuggingFace/python/huggingface.aiconfig.json
new file mode 100644
index 000000000..2ab54cc38
--- /dev/null
+++ b/extensions/HuggingFace/python/huggingface.aiconfig.json
@@ -0,0 +1,21 @@
+{
+  "name": "",
+  "schema_version": "latest",
+  "metadata": {
+    "parameters": {},
+    "models": {}
+  },
+  "description": "",
+  "prompts": [
+    {
+      "name": "prompt_1",
+      "input": "",
+      "metadata": {
+        "model": "gpt-4",
+        "parameters": {}
+      },
+      "outputs": []
+    }
+  ],
+  "$schema": "https://json.schemastore.org/aiconfig-1.0"
+}
\ No newline at end of file
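In `travel.aiconfig.json` above, `gen_itinerary` references `{{get_activities.output}}`, so it depends on the first prompt's output. Below is a minimal sketch of running that chain with the SDK; it assumes `OPENAI_API_KEY` is set in the environment and that the `run_with_dependencies` flag is available in the installed SDK version, as described in the AIConfig getting-started docs. The `order_by` override is an example value.

```python
# Sketch: running the chained prompts in travel.aiconfig.json. gen_itinerary
# templates in {{get_activities.output}}, so run_with_dependencies=True asks
# the runtime to execute get_activities first. Requires OPENAI_API_KEY.
import asyncio

from aiconfig import AIConfigRuntime


async def main() -> None:
    config = AIConfigRuntime.load("travel.aiconfig.json")

    # Override the order_by parameter defined on gen_itinerary if desired.
    await config.run(
        "gen_itinerary",
        params={"order_by": "duration"},
        run_with_dependencies=True,
    )
    print(config.get_output_text("gen_itinerary"))


if __name__ == "__main__":
    asyncio.run(main())
```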