From ad208fee4a9b2b89941456c14d406719bff3027b Mon Sep 17 00:00:00 2001
From: Samuel Rince
Date: Thu, 18 Apr 2024 11:21:24 +0200
Subject: [PATCH] feat: update dependencies

make openai optional
downgrade wrapt to be compatible with tensorflow 2.15.0 (colab)
---
 .github/workflows/publish-pypi.yaml     | 38 +++++++++++++++++++++++++
 .github/workflows/publish-testpypi.yaml |  2 +-
 README.md                               |  6 ++--
 ecologits/tracers/openai_tracer.py      | 18 +++++++++---
 poetry.lock                             | 19 +++++++------
 pyproject.toml                          | 28 +++++++++---------
 6 files changed, 82 insertions(+), 29 deletions(-)
 create mode 100644 .github/workflows/publish-pypi.yaml

diff --git a/.github/workflows/publish-pypi.yaml b/.github/workflows/publish-pypi.yaml
new file mode 100644
index 00000000..f6f34795
--- /dev/null
+++ b/.github/workflows/publish-pypi.yaml
@@ -0,0 +1,38 @@
+name: publish-pypi
+
+on:
+  release:
+    types: [published]
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      # check-out repo and set-up python
+      - name: Check out repository
+        uses: actions/checkout@v4
+      - name: Set up python
+        id: setup-python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.9"
+
+      # install & configure poetry
+      - name: Install Poetry
+        uses: snok/install-poetry@v1
+        with:
+          virtualenvs-create: true
+          virtualenvs-in-project: true
+          installer-parallel: true
+
+      # configure credentials
+      - name: Configure credentials
+        run: poetry config pypi-token.pypi ${{ secrets.PYPI_API_KEY }}
+
+      # build package
+      - name: Build package
+        run: poetry build
+
+      # publish package
+      - name: Publish package
+        run: poetry publish
diff --git a/.github/workflows/publish-testpypi.yaml b/.github/workflows/publish-testpypi.yaml
index 584ea68d..25abad84 100644
--- a/.github/workflows/publish-testpypi.yaml
+++ b/.github/workflows/publish-testpypi.yaml
@@ -1,4 +1,4 @@
-name: Publish package on Test PyPI
+name: publish-testpypi
 
 on:
   push:
diff --git a/README.md b/README.md
index 00fa4070..185cdb12 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,9 @@ EcoLogits
 
 ## ⚙️ Installation
 
-Coming soon...
+```shell +pip install ecologits +``` ## 🚀 Usage @@ -23,7 +25,7 @@ client = OpenAI( response = client.chat.completions.create( model="gpt-3.5-turbo", messages=[ - {"role": "user", "content": "Hello, can you explain what is the GenAI Impact project?"} + {"role": "user", "content": "Tell me a funny joke!"} ] ) diff --git a/ecologits/tracers/openai_tracer.py b/ecologits/tracers/openai_tracer.py index 0453d0b3..0958a0eb 100644 --- a/ecologits/tracers/openai_tracer.py +++ b/ecologits/tracers/openai_tracer.py @@ -1,15 +1,25 @@ import time from typing import Any, Callable, Union -from openai import AsyncStream, Stream -from openai.resources.chat import AsyncCompletions, Completions -from openai.types.chat import ChatCompletion as _ChatCompletion -from openai.types.chat import ChatCompletionChunk as _ChatCompletionChunk from wrapt import wrap_function_wrapper from ecologits.impacts import Impacts from ecologits.tracers.utils import compute_llm_impacts +try: + from openai import AsyncStream, Stream + from openai.resources.chat import AsyncCompletions, Completions + from openai.types.chat import ChatCompletion as _ChatCompletion + from openai.types.chat import ChatCompletionChunk as _ChatCompletionChunk +except ImportError: + AsyncStream = object() + Stream = object() + AsyncCompletions = object() + Completions = object() + _ChatCompletion = object() + _ChatCompletionChunk = object() + + PROVIDER = "openai" diff --git a/poetry.lock b/poetry.lock index bec7f9ab..4967d93e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -149,7 +149,7 @@ vertex = ["google-auth (>=2,<3)"] name = "anyio" version = "4.3.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, @@ -392,7 +392,7 @@ files = [ name = "distro" version = "1.9.0" description = "Distro - an OS platform information API" -optional = false +optional = true python-versions = ">=3.6" files = [ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, @@ -617,7 +617,7 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, @@ -628,7 +628,7 @@ files = [ name = "httpcore" version = "1.0.5" description = "A minimal low-level HTTP client." -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, @@ -649,7 +649,7 @@ trio = ["trio (>=0.22.0,<0.26.0)"] name = "httpx" version = "0.25.2" description = "The next generation HTTP client." 
-optional = false +optional = true python-versions = ">=3.8" files = [ {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, @@ -1094,7 +1094,7 @@ setuptools = "*" name = "openai" version = "1.16.1" description = "The official Python library for the openai API" -optional = false +optional = true python-versions = ">=3.7.1" files = [ {file = "openai-1.16.1-py3-none-any.whl", hash = "sha256:77ef3db6110071f7154859e234250fb945a36554207a30a4491092eadb73fcb5"}, @@ -1726,7 +1726,7 @@ files = [ name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, @@ -1927,7 +1927,7 @@ files = [ name = "tqdm" version = "4.66.2" description = "Fast, Extensible Progress Meter" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, @@ -2277,8 +2277,9 @@ anthropic = ["anthropic"] cohere = ["cohere"] huggingface-hub = ["aiohttp", "huggingface-hub", "minijinja", "tiktoken"] mistralai = ["mistralai"] +openai = ["openai"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4" -content-hash = "9a06fd4df441197d69d4f8d5272cf6ca0fa8640b1a364eec49d5d1a59a902401" +content-hash = "018cfd368499cfc3a31d11bf7ad4dcbd96adaf855767c515985a74952306587d" diff --git a/pyproject.toml b/pyproject.toml index d7bdc0a9..af48261f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,17 +6,18 @@ build-backend = "poetry.core.masonry.api" name = "ecologits" version = "0.1.0" description = "EcoLogits tracks and estimates the energy consumption and environmental impacts of using generative AI models through APIs." 
-authors = ["GenAI Impact"] +authors = [ + "GenAI Impact", + "Data For Good" +] +maintainers = [ + "GenAI Impact" +] license = "MPL-2.0" readme = "README.md" repository = "https://github.com/genai-impact/ecologits" classifiers = [ -# "Programming Language :: Python", -# "Programming Language :: Python :: 3.9", -# "Programming Language :: Python :: 3.10", -# "Programming Language :: Python :: 3.11", -# "Programming Language :: Python :: 3.12", -# "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)", + "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)", "Topic :: Internet", "Topic :: Scientific/Engineering :: Artificial Intelligence", "Topic :: Software Development :: Libraries :: Python Modules", @@ -33,18 +34,19 @@ classifiers = [ [tool.poetry.dependencies] python = ">=3.9,<4" -openai = "^1.12.0" -wrapt = "^1.16.0" +wrapt = "^1.14.1" pydantic = ">=2,<3" +openai = { version = "^1.12.0", optional = true } mistralai = { version = "^0.1.3", optional = true } anthropic = { version = "^0.18.1", optional = true } cohere = {version = "^5.2.5", optional = true} huggingface-hub = { version = "^0.22.2", optional = true } -tiktoken = {version = "^0.6.0", optional = true} -aiohttp = {version = "^3.9.3", optional = true} -minijinja = {version = "^1.0.16", optional = true} +tiktoken = { version = "^0.6.0", optional = true } +aiohttp = { version = "^3.9.3", optional = true } +minijinja = { version = "^1.0.16", optional = true } [tool.poetry.extras] +openai = ["openai"] mistralai = ["mistralai"] anthropic = ["anthropic"] cohere = ["cohere"] @@ -178,7 +180,7 @@ line-length = 120 dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" -target-version = "py38" +target-version = "py39" [tool.ruff.mccabe] max-complexity = 10