Commit

Support configuring logging via config file
StdioA committed Aug 26, 2024
1 parent 457266d commit c20e379
Showing 10 changed files with 127 additions and 22 deletions.
9 changes: 6 additions & 3 deletions bean_utils/vec_query.py
@@ -1,4 +1,4 @@
import logging
import time
import requests
from beancount.loader import load_file
from beancount.core.data import Transaction
@@ -100,7 +100,7 @@ def _read_lines(fname, start, end):
total_usage += usage

build_db(unique_txs_list)
logging.info("Total token usage: %d", total_usage)
conf.logger.info("Total token usage: %d", total_usage)
return total_usage


@@ -117,4 +117,7 @@ def build_db_from_file():
file_path = "main.bean"
entries, errors, options = load_file(file_path)
transactions = [e for e in entries if isinstance(e, Transaction)][-1000:]
logging.debug("Tokens:", build_tx_db(transactions))
start_time = time.time()
tokens = build_tx_db(transactions)
duration = time.time() - start_time
conf.logger.info("Tokens: %d, duration: %d", tokens, duration)
3 changes: 3 additions & 0 deletions bots/mmbot.py
@@ -95,8 +95,10 @@ async def submit_listener(self, event: WebHookEvent):
if webhook_id == "submit":
reaction = "white_check_mark"
bean_manager.commit_trx(trx.strip())
conf.logger.info("Commit transaction: %s\n", trx)
else:
reaction = "wastebasket"
conf.logger.info("Cancel transaction")

self.driver.respond_to_web(event, {
"update": {
@@ -166,4 +168,5 @@ def run_bot():
), # Either specify your settings here or as environment variables.
plugins=[BeanBotPlugin()], # Add your own plugins here.
)
conf.logger.info("Beancount bot start")
bot.run()
7 changes: 3 additions & 4 deletions bots/telegram_bot.py
@@ -1,7 +1,6 @@
# coding: utf-8
import time
from conf.i18n import gettext as _
import logging
from datetime import timedelta, datetime
import telegram
from telegram import Update
@@ -137,10 +136,10 @@ async def callback(update, context):
if choice == "submit":
result_msg = _("Submitted ✅")
bean_manager.commit_trx(trx)
logging.info("Commit transaction: %s\n", trx)
conf.logger.info("Commit transaction: %s\n", trx)
else:
result_msg = _("Cancelled ❌")
logging.info("Cancel transaction")
conf.logger.info("Cancel transaction")

if result_msg:
await query.edit_message_text(text=f"{trx}\n\n{result_msg}")
@@ -183,5 +182,5 @@ def run_bot():
for handler in handlers:
application.add_handler(handler)

logging.info("Bot start")
conf.logger.info("Beancount bot start")
application.run_polling(allowed_updates=Update.ALL_TYPES)
39 changes: 37 additions & 2 deletions conf/__init__.py
@@ -1,13 +1,48 @@
import logging
import logging.config
from .i18n import init_locale
from .config_data import Config
from .utils import merge_dicts


__all__ = ['config', 'init_locale', "load_config"]

__all__ = ['config', 'init_locale', "load_config", "logger", "init_logging"]

config = None

_logger_name = "beanbot"
logger = logging.getLogger(_logger_name)


def load_config(config_path):
global config
config = Config(config_path)


_default_logging_config = {
"version": 1,
"formatters": {
"standard": {
"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
}
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "WARNING",
"formatter": "standard",
"stream": "ext://sys.stdout"
}
},
"loggers": {
_logger_name: {
"level": "WARNING",
"handlers": ["console"],
"propagate": False,
}
}
}


def init_logging():
logging_conf = merge_dicts(_default_logging_config, config.get("logging", {}))
logging.config.dictConfig(logging_conf)
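
For illustration, here is a minimal sketch (assumed usage, not part of this diff) of what init_logging effectively does when config.yaml only overrides log levels; user_logging_conf below is a hypothetical stand-in for a parsed "logging:" section of the config file:

import logging.config

from conf import _default_logging_config, logger  # "beanbot" logger defined above
from conf.utils import merge_dicts

# Hypothetical level-only override, equivalent to a small "logging:" section in config.yaml
user_logging_conf = {
    "handlers": {"console": {"level": "INFO"}},
    "loggers": {"beanbot": {"level": "INFO"}},
}

merged = merge_dicts(_default_logging_config, user_logging_conf)
logging.config.dictConfig(merged)  # the same call init_logging() performs
logger.info("Visible now that both the logger and the console handler allow INFO")
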
9 changes: 9 additions & 0 deletions conf/utils.py
@@ -0,0 +1,9 @@
def merge_dicts(dict1, dict2):
result = dict1.copy()

for key, value in dict2.items():
if key in result and isinstance(result[key], dict) and isinstance(value, dict):
result[key] = merge_dicts(result[key], value)
else:
result[key] = value
return result
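
Why a recursive merge instead of a plain dict.update: with the nested logging schema, a shallow merge would replace the whole "loggers" section and silently drop the default handler wiring. A small illustrative sketch (assumed, not from this commit):

from conf.utils import merge_dicts

default = {"loggers": {"beanbot": {"level": "WARNING", "handlers": ["console"]}}}
override = {"loggers": {"beanbot": {"level": "INFO"}}}

shallow = {**default, **override}      # top-level replacement: the handlers list is lost
deep = merge_dicts(default, override)  # recursive merge: only the level changes

assert shallow["loggers"]["beanbot"] == {"level": "INFO"}
assert deep["loggers"]["beanbot"] == {"level": "INFO", "handlers": ["console"]}
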
38 changes: 38 additions & 0 deletions conf/utils_test.py
@@ -0,0 +1,38 @@
from conf.utils import merge_dicts


def test_merge_empty_dicts():
dict1 = {}
dict2 = {}
expected = {}
assert merge_dicts(dict1, dict2) == expected

def test_merge_dict_with_empty_dict():
dict1 = {'a': 1, 'b': 2}
dict2 = {}
expected = {'a': 1, 'b': 2}
assert merge_dicts(dict1, dict2) == expected

def test_merge_non_overlapping_keys():
dict1 = {'a': 1, 'b': 2}
dict2 = {'c': 3, 'd': 4}
expected = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
assert merge_dicts(dict1, dict2) == expected

def test_merge_overlapping_keys_non_dict_values():
dict1 = {'a': 1, 'b': 2}
dict2 = {'b': 3, 'c': 4}
expected = {'a': 1, 'b': 3, 'c': 4}
assert merge_dicts(dict1, dict2) == expected

def test_merge_overlapping_keys_dict_values():
dict1 = {'a': 1, 'b': {'x': 1, 'y': 2}}
dict2 = {'b': {'y': 3, 'z': 4}}
expected = {'a': 1, 'b': {'x': 1, 'y': 3, 'z': 4}}
assert merge_dicts(dict1, dict2) == expected

def test_merge_deeply_nested_dict_values():
dict1 = {'a': 1, 'b': {'x': 1, 'y': {'m': 1, 'n': 2}}}
dict2 = {'b': {'y': {'n': 3, 'o': 4}}}
expected = {'a': 1, 'b': {'x': 1, 'y': {'m': 1, 'n': 3, 'o': 4}}}
assert merge_dicts(dict1, dict2) == expected
18 changes: 18 additions & 0 deletions config.yaml.example
@@ -35,3 +35,21 @@ rag:
api_url: "https://api.deepseek.com/v1/chat/completions" # OpenAI compatible API endpoint
api_key: "{your_key_here}"
model: "deepseek-chat"

# Logging config; any key specified here overrides the corresponding default (e.g. you can set only the level)
# See https://docs.python.org/3/library/logging.config.html#logging-config-dictschema
logging:
formatters:
standard:
format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
handlers:
console:
class: logging.StreamHandler
level: WARNING
formatter: standard
stream: ext://sys.stdout
loggers:
beanbot:
level: WARNING
handlers: [console]
propagate: no
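
Since the defaults above already wire up the console handler, a typical override only needs to touch the levels. A hypothetical minimal "logging:" section (not part of the example file) could look like this:

logging:
  handlers:
    console:
      level: INFO
  loggers:
    beanbot:
      level: INFO
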
20 changes: 11 additions & 9 deletions main.py
@@ -1,14 +1,20 @@
import argparse
import conf
import logging
from bean_utils import bean


def main():
def init_bot(config_path):
conf.load_config(config_path)
# Init i18n
conf.init_locale()
# Init logging
logging.basicConfig(level=logging.INFO)
# logging.getLogger().addHandler(logging.StreamHandler())
conf.init_logging()
# Init beancount manager
bean.init_bean_manager()



def main():
parser = argparse.ArgumentParser(prog='beanbot',
description='Bot to translate text into beancount transaction')
subparser = parser.add_subparsers(title='sub command', dest='command')
@@ -22,11 +28,7 @@ def main():
if args.command is None:
parser.print_help()
return

conf.load_config(args.c)
# Init i18n
conf.init_locale()
bean.init_bean_manager()
init_bot(args.c)

if args.command == "telegram":
from bots.telegram_bot import run_bot
3 changes: 1 addition & 2 deletions vec_db/json_vec_db.py
@@ -1,6 +1,5 @@
import pathlib
import json
import logging
from operator import itemgetter
import numpy as np
from numpy.linalg import norm
@@ -24,7 +23,7 @@ def query_by_embedding(embedding, sentence, candidate_amount):
with open(_get_db_name()) as f:
transactions = json.load(f)
except FileNotFoundError:
logging.warning("JSON vector database is not built")
conf.logger.warning("JSON vector database is not built")
return None
embed_query = np.array(embedding)
# Calculate cosine similarity
3 changes: 1 addition & 2 deletions vec_db/sqlite_vec_db.py
@@ -1,5 +1,4 @@
import pathlib
import logging
from operator import itemgetter
import sqlite3
import sqlite_vec
@@ -85,7 +84,7 @@ def query_by_embedding(embedding, sentence, candidate_amount):
except sqlite3.OperationalError as e:
# Handle exception when vec_db is not built
if "no such table" in e.args[0]:
logging.warning("Sqlite vector database is not built")
conf.logger.warning("Sqlite vector database is not built")
return []
raise
if not rows:
