diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000..5cfe579 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,33 @@ +{ + "name": "Python 3", + // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile + "image": "mcr.microsoft.com/devcontainers/python:1-3.11-bullseye", + "customizations": { + "codespaces": { + "openFiles": [ + "README.md", + "streamlit_app.py" + ] + }, + "vscode": { + "settings": {}, + "extensions": [ + "ms-python.python", + "ms-python.vscode-pylance" + ] + } + }, + "updateContentCommand": "[ -f packages.txt ] && sudo apt update && sudo apt upgrade -y && sudo xargs apt install -y **Note:** Expand this section by considering the -> following points: - -- Give context and background on your library -- Explain why you created it -- Provide multiple examples and approaches of how - to work with it -- Help the reader make connections -- Avoid writing instructions or technical descriptions - here \ No newline at end of file +# StockTracker +## Compatibility + +This project has been tested on: + +- WSL 2 on Windows running Ubuntu 22.04 +- Ubuntu 22.04 + +## Motivation + +The motivation behind this project is to improve the developer's financial literacy and learn software engineering best practices. + +## Project Structure + +The main components of this project are: + +- *Web app*: Built using Python Streamlit +- *Business logic*: Implemented using Python and Flask +- *Database*: PostgreSQL + +## Technologies Used + +- Python 3.10 +- Flask +- Plotly +- Pandas +- psycopg2-binary +- Streamlit +- yfinance +- PostgreSQL + +## User Interface + +### User Interface +![alt text](./assets/user-interface-diagram.png) + +The image above illustrates what a user would see if the project is set up successfully. + +## Architecture + +### Architecture Diagram + +![alt text](./assets/architecture-diagram.png) + +The idea behind the project is to split each microservice into respective containers: + +- Web app container +- Business logic container +- Database container + +An ***NFS server*** is set up to allow sharing of PostgreSQL backup files (*.dump*) with the PostgreSQL ***database container***, which acts as the *NFS client*. Any user interactions with the ***web app container*** will send requests via *RESTful API* to the ***business logic container***, which in turn sends the requests to the ***database container*** via the *database connection*. + +## Demo + +A version of the web app that contains part of the database has been deployed. Feel free to give that a go. + +[Demo link](https://wleong1-stocktracker.streamlit.app/) diff --git a/docs/how-to-guides.md b/docs/how-to-guides.md index 18c2ad3..e7fdd8c 100755 --- a/docs/how-to-guides.md +++ b/docs/how-to-guides.md @@ -1,6 +1,106 @@ -This part of the project documentation focuses on a -**problem-oriented** approach. You'll tackle common -tasks that you might have, with the help of the code -provided in this project. +As mentioned previously, what we are trying to achieve is to have three separate containers, one for each microservice, that communicates with each other via RESTful API and database connection. -To be continued. \ No newline at end of file +An ***NFS server*** is set up to allow sharing of database backup file (*.dump*) with the ***database container***. + +The ***database container*** acts as an NFS client and accesses the directory shared by the ***NFS server***. 
+The ***database container*** then restores the database via *pg_restore*. After that, the ***database container*** is ready to accept connections to the database.
+
+The ***business logic container*** receives requests sent by the user via the ***web app container*** and forwards them to the ***database container*** over the *database connection*. Once the ***database container*** returns the data, the ***business logic container*** processes it and returns it in the required format to the ***web app container***.
+
+The ***web app container*** receives the data from the ***business logic container***, does some simple processing and displays the data back to the user.
+
+Because the containers depend on one another, it is vital to set them up in the right order. Before you start, ensure that you have the following dependencies installed:
+
+- Docker
+- net-tools (to get your IP address)
+- An API key from NewsAPI
+- vim (optional)
+
+## To set up
+
+1. Clone the project repository using Git.
+
+2. Paste your NewsAPI key into *parameters.py* under *NEWS_API_KEY*.
+
+3. Provide the ***database container*** with the backup (*.dump*) file using one of the following three methods:
+
+    === "The NFS method"
+
+        As discussed previously, the NFS method sets up an NFS server and shares files with the NFS client.
+
+        1. Get your IP address:
+
+            Open a terminal and type ```ifconfig``` or ```ip a```. Look for the *inet* (or *inet addr*) entry under the relevant network interface; it is usually the first one from the top.
+
+        2. Save your IP address in an environment variable named **HOST_IP**:
+
+            ```export HOST_IP=<ip-address-from-step-1>```
+
+        3. Install and configure the NFS server:
+
+            1. ```sudo apt install nfs-kernel-server```
+            2. ```sudo systemctl start nfs-kernel-server.service```
+            3. ```sudo vim /etc/exports``` NOTE: Replace *vim* with the editor of your choice.
+            4. Add ```/network-directory *(rw,sync,no_root_squash,no_subtree_check)```
+            5. ```sudo exportfs -a```
+
+    === "The non-NFS *volume mounting* method"
+
+        The non-NFS *volume mounting* method mounts a local directory into the container, allowing the container to access the required backup file through the shared volume.
+
+        1. In *docker-compose.yml*, comment out the *volumes* section (lines 53-59).
+
+        2. Replace ```- nfs-volume:/nfs``` in line 16 with ```- ./network_directory:/nfs```
+
+    === "The non-NFS *docker cp* method"
+
+        The non-NFS *docker cp* method uses ```docker cp``` to copy the required file into the correct directory in the container.
+
+        1. In *docker-compose.yml*, comment out the *volumes* section (lines 53-59).
+
+        2. In *docker-compose.yml*, comment out the *volumes* section under *db* (lines 15-16).
+
+        3. In the terminal, run ```docker cp ./network_directory/db.dump stocks-postgres:/nfs```
+
+4. Run ```docker compose up```
+
+    If successful, you should see all three containers report success, as illustrated below:
+
+    - Stocks-postgres
+    ![alt text](./assets/postgres-container-setup.png)
+
+    - Streamlit
+    ![alt text](./assets/streamlit-container-setup.png)
+
+    - Core-modules
+    ![alt text](./assets/core-modules-container-setup.png)
+
+5. Once all three containers report success as shown above, go to **127.0.0.1:8501** in your web browser to access the app. A quick way to sanity-check the running services is sketched after this list.
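Once the containers are up, a quick end-to-end check can save debugging time later. The sketch below is **not** part of the project: it assumes the defaults used in this guide (the Streamlit app published on **127.0.0.1:8501** and the Flask API served by the ***business logic container*** on port 5000), and it assumes port 5000 is reachable from wherever you run it. If the Flask port is not published to the host, run it from inside the Docker network and use ```http://core-modules:5000``` instead.

```python
"""Sanity-check the three services after `docker compose up`."""
import requests

# Streamlit exposes a health endpoint (the same one its Dockerfile HEALTHCHECK uses).
health = requests.get("http://127.0.0.1:8501/_stcore/health", timeout=5)
print("streamlit health:", health.status_code)  # expect 200

# Assumption: the Flask API is reachable on the host at port 5000.
FLASK_BASE = "http://127.0.0.1:5000"

# The business logic container serves the company list used by the dropdown.
companies = requests.get(f"{FLASK_BASE}/model/generate_company_list", timeout=30).json()
print(f"{len(companies)} companies available, e.g. {companies[:3]}")

# Fetch the price, news and chart data for one company, exactly as the web app does.
sample = requests.post(f"{FLASK_BASE}/update_data", json={"company": companies[0]}, timeout=60)
print("keys returned:", sorted(sample.json()))  # expect ['graph', 'news', 'price']
```

If all three calls succeed, the database restore, the Flask endpoints and the Streamlit front end are wired up correctly.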
+## To stop the application
+
+1. Run ```docker compose down```
+
+2. Run ```docker image rm stocktracker-db:latest```
+
+3. Run ```docker image rm stocktracker-core:latest```
+
+4. Run ```docker image rm stocktracker-web:latest```
+
+5. Run ```docker network rm stocks-network```
+
+6. If you have NFS set up, run ```docker volume rm stocktracker_test-volume```
+
+## Common issues
+
+### 1. Load metadata error
+
+![alt text](./assets/load-metadata-error.png)
+
+Either remove ```"credsStore": "desktop.exe"``` from ```~/.docker/config.json```, **or** pull the images manually with ```docker pull postgres && docker pull ubuntu:22.04 && docker pull python:3.10-slim```. Then rerun ```docker compose up```.
+
+### 2. Missing core-modules success message
+
+If the *streamlit* success message appears before the *core-modules* one, give it about 30 seconds and it should appear. This is because the ***core-modules container*** eagerly loads the data from the database and caches it in a local variable before it starts serving requests.
diff --git a/docs/index.md b/docs/index.md
index f7dd8ac..348b598 100755
--- a/docs/index.md
+++ b/docs/index.md
@@ -6,22 +6,13 @@ financial year and calculator.
 ## Table Of Contents
-The documentation follows the best practice for
-project documentation as described by Daniele Procida
-in the [Diátaxis documentation framework](https://diataxis.fr/)
-and consists of four separate parts:
-
-1. [Tutorials](tutorials.md)
+1. [Explanation](explanation.md)
 2. [How-To Guides](how-to-guides.md)
 3. [Reference](reference.md)
-4. [Explanation](explanation.md)
-
-Quickly find what you're looking for depending on
-your use case by looking at the different pages.
 ## Project Overview
-::: src
+StockTracker is a Python application that helps analyse and visualise stock performance data for S&P 500 companies. It provides a user-friendly GUI for comparing stock prices, viewing historical trends, and staying updated with the latest news.
 ## Acknowledgements
 I want to thank my house plants for providing me with
diff --git a/docs/reference.md b/docs/reference.md
index 162a8ec..48483a3 100755
--- a/docs/reference.md
+++ b/docs/reference.md
@@ -1,8 +1,3 @@
-This part of the project documentation focuses on
-an **information-oriented** approach. Use it as a
-reference for the technical implementation of the
-`Calculators` project code.
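A note on the reference page: the ```::: src.*``` lines kept below are directives that pull API documentation straight out of the modules' docstrings (presumably via a mkdocstrings-style plugin; the plugin list itself is not visible in this diff). The snippet that follows is purely illustrative — a hypothetical helper, not part of *src/* — showing the Google-style *Args*/*Returns* docstring layout these modules use, which is what the directives render into the reference page.

```python
# Illustrative only: a hypothetical helper showing the Google-style
# Args/Returns docstring layout used by the documented modules in src/.
def format_price(price: float) -> str:
    """
    Formats a closing price for display.

    Args:
        price: The most recent closing price.

    Returns:
        The price rounded to five decimal places, as a string.
    """
    return str(round(price, 5))
```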
- ::: src.live_price_display ::: src.news_display ::: src.model \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml index cc785b7..f5c4542 100755 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -8,7 +8,10 @@ plugins: nav: - Stock Tracker Docs: index.md - - tutorials.md + - Explanation: explanation.md - How-To Guides: how-to-guides.md - - reference.md - - explanation.md + - Reference: reference.md + +markdown_extensions: + - pymdownx.tabbed: + alternate_style: true diff --git a/requirements.txt b/requirements.txt old mode 100644 new mode 100755 index 0f0160b..1a291d9 --- a/requirements.txt +++ b/requirements.txt @@ -1,20 +1,40 @@ +altair==5.3.0 appdirs==1.4.4 asgiref==3.7.2 +attrs==23.2.0 beautifulsoup4==4.12.3 +blinker==1.7.0 +cachetools==5.3.3 certifi==2023.5.7 charset-normalizer==3.1.0 +click==8.1.7 +contourpy==1.2.0 coverage==7.4.3 +cycler==0.12.1 Django==4.1.12 django-admin-volt==1.0.10 exceptiongroup==1.1.1 +Flask==3.0.2 +fonttools==4.50.0 frozendict==2.4.0 +gitdb==4.0.11 +GitPython==3.1.43 gunicorn==21.2.0 html5lib==1.1 idna==3.4 iniconfig==2.0.0 install==1.3.5 +itsdangerous==2.1.2 +Jinja2==3.1.3 jmespath==1.0.1 +jsonschema==4.21.1 +jsonschema-specifications==2023.12.1 +kiwisolver==1.4.5 lxml==4.9.3 +markdown-it-py==3.0.0 +MarkupSafe==2.1.5 +matplotlib==3.8.3 +mdurl==0.1.2 multitasking==0.0.11 mypy==1.8.0 mypy-extensions==1.0.0 @@ -22,20 +42,39 @@ numpy==1.26.4 packaging==23.1 pandas==2.2.1 pandas-stubs==2.2.0.240218 +patsy==0.5.6 peewee==3.17.1 +pillow==10.2.0 +plotly==5.20.0 pluggy==1.0.0 +protobuf==4.25.3 psycopg2-binary==2.9.9 +pyarrow==15.0.2 +pydeck==0.8.1b0 +Pygments==2.17.2 +pyparsing==3.1.2 pytest==7.3.1 pytest-cov==4.1.0 python-dateutil==2.8.2 python-dotenv==1.0.0 pytz==2023.3 +referencing==0.34.0 requests==2.31.0 +rich==13.7.1 +rpds-py==0.18.0 ruff==0.3.0 +scipy==1.13.0 six==1.16.0 +smmap==5.0.1 soupsieve==2.5 sqlparse==0.4.4 +statsmodels==0.14.1 +streamlit==1.33.0 +tenacity==8.2.3 +toml==0.10.2 tomli==2.0.1 +toolz==0.12.1 +tornado==6.4 types-openpyxl==3.1.0.20240301 types-psycopg2==2.9.21.20240311 types-pytz==2024.1.0.20240203 @@ -44,6 +83,8 @@ typing_extensions==4.8.0 tzdata==2023.3 urllib3==2.0.2 utils==1.0.1 +watchdog==4.0.0 webencodings==0.5.1 +Werkzeug==3.0.2 whitenoise==6.5.0 yfinance==0.2.37 diff --git a/src/Dockerfile b/src/Dockerfile new file mode 100755 index 0000000..8353c1d --- /dev/null +++ b/src/Dockerfile @@ -0,0 +1,22 @@ +FROM ubuntu:22.04 + +# Set a directory for the app +WORKDIR /usr/src/ + +# Copy all the files to the container +COPY . . + +# Install dependencies +RUN apt-get update +RUN apt-get install -y python3 +RUN apt install -y python3-pip +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the shell script into the image +COPY entrypoint.sh /usr/local/bin/entrypoint.sh + +# Make the shell script executable +RUN chmod +x /usr/local/bin/entrypoint.sh + +# Set the entrypoint +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/src/entrypoint.sh b/src/entrypoint.sh new file mode 100644 index 0000000..47cd576 --- /dev/null +++ b/src/entrypoint.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +cd .. 
+ +# Launches the HTTP endpoints +python3 src/flask_endpoints.py diff --git a/src/flask_endpoints.py b/src/flask_endpoints.py new file mode 100644 index 0000000..1cc71ba --- /dev/null +++ b/src/flask_endpoints.py @@ -0,0 +1,106 @@ +"""This module creates the endpoints to be called by streamlit.""" + +from typing import Union, Any +from flask import Flask, jsonify, request # pylint: disable=E0401 +import pandas as pd +import sys # pylint: disable=C0411 +import os # pylint: disable=C0411 +sys.path.append(os.getcwd()) +from src.model import Model # pylint: disable=C0413 +from src.live_price_display import LivePriceDisplay # type: ignore[import-untyped] # pylint: disable=C0413 +from src.news_display import NewsDisplay # pylint: disable=C0413 + + +models: Model = Model() +news_disp: NewsDisplay = NewsDisplay() +price_disp: LivePriceDisplay = LivePriceDisplay() +all_data: Union[pd.DataFrame, Any] = models.process_data() + +app = Flask(__name__) + +def update_graph(company: str) -> str: + """ + This method gets the data from the selected company. + + Args: + company: The ticker symbol of the company + + Returns: + chart_data: A DataFrame containing required information of all companies + """ + raw_data: pd.Series = all_data[company] + data: dict = { + "date": raw_data["trade_date"], + "close": raw_data["close"] + } + df: pd.DataFrame = pd.DataFrame(data) # pylint: disable=C0103 + chart_data: str = df.to_json() + return chart_data + +def update_price(company: str) -> Union[float, str]: + """ + Returns the price of the selected company using core modules. + + Args: + company_name: The ticker symbol of the company. + + Returns: + The most recent price. + """ + price: Union[float, str] = price_disp.display_final_price_yf(company) + return price + +def update_news(company: str) -> list: + """ + Get the formatted news. + + Args: + company: The ticker symbol of the company + + Returns: + news: The most recent five articles + """ + news: list = news_disp.format_news_django(company) + return news + +@app.route("/model/generate_company_list", methods=["GET"]) +def generate_company_list(): + """ + Returns the list of companies. + + Args: + None. + + Returns: + ticker_list: The list of companies in json format. + """ + ticker_list: list + ticker_list, _ = models.generate_company_list() + return jsonify(ticker_list) + +@app.route("/update_data", methods=["GET", "POST"]) +def update_data(): + """ + Returns the data of the selected company. + + Args: + None. + + Returns: + response: The data for the selected company in json. + """ + response: dict = {} + data: dict = request.get_json() + company: str = data["company"] + print(f"Received company: {company}") + processed_price: Union[float, str] = update_price(company) + processed_news: list = update_news(company) + processed_chart_data: str = update_graph(company) + + response["price"] = processed_price + response["news"] = processed_news + response["graph"] = processed_chart_data + return jsonify(response) + +if __name__ == "__main__": + app.run(host="0.0.0.0", port=5000) diff --git a/src/live_price_display.py b/src/live_price_display.py index 89ea218..9ecfe27 100755 --- a/src/live_price_display.py +++ b/src/live_price_display.py @@ -57,13 +57,13 @@ def display_final_price_av(company_name: str) -> Union[str, dict, Any]: @staticmethod def display_final_price_yf(company_name: str) -> Union[float, str]: """ - Returns a the price using Yahoo Finance. + Returns the price of the selected company using Yahoo Finance. 
Args: - company_name: The ticker symbol of the company + company_name: The ticker symbol of the company. Returns: - The most recent price in string + The most recent price. """ # Uncomment below for full company names in selection rather than ticker symbols. # conn = psycopg2.connect(database = "stocks", user='postgres', password='123456') @@ -78,26 +78,3 @@ def display_final_price_yf(company_name: str) -> Union[float, str]: return round(price, 5) except IndexError: return "Error fetching price" - - -# from pymongo import MongoClient -# client = MongoClient(mongodb_connection) -# database = client.StockTracker -# collection = database.Companies -# projection = {"_id": 0, "name": 1, "price": 1} -# cursor = collection.find({"name": "MSFT"}, projection) -# for doc in cursor: -# latest_date = doc["price"][0]["date"] -# print(latest_date) -# symbols = ["AAPL", "MSFT", "AMZN", "GOOGL", "NVDA"]# "TSLA", "GOOG", "BRK.B", "META", "UNH" -# for symbol in symbols: -# price_params: dict = { -# "apikey": ALPHA_VANTAGE_API_KEY, -# "function": "TIME_SERIES_DAILY", -# "symbol": symbol, -# "outputsize": "full" -# } -# a = requests.get(ALPHA_VANTAGE_ENDPOINT, params=price_params).json() -# company = {"_id": symbol, "price":[{"date": b, "close": a["Time Series (Daily)"][b]["4. close"]} for b in a["Time Series (Daily)"]]} # pylint: disable=C0301 -# result = collection.insert_one(company) -# print(f"Inserted document ID: {result.inserted_id}") diff --git a/src/model.py b/src/model.py index a3bf243..6b9c98b 100755 --- a/src/model.py +++ b/src/model.py @@ -1,5 +1,4 @@ -"""This module reads csv data files and processes them into the required format""" - +"""This module processes raw data and returns processed data in required format.""" from typing import Union, Tuple import warnings import psycopg2 # pylint: disable=E0401 @@ -12,27 +11,42 @@ class Model: """Processes data and returns data in required format""" def __init__(self) -> None: + """ + Constructs all the necessary attributes to make the necessary connection to the + database. + + Args: + None + + Returns: + None + """ self.path: str = "../individual_stocks_5yr/" + self.params: dict = {"host":"stocks-postgres", + "database":"stocks", + "user":"postgres", + "password":"123456", + "port":"5432"} - @staticmethod - def generate_company_list() -> Tuple[list, list]: + def generate_company_list(self) -> Tuple[list, list]: """ Returns a list of companies. - :return: (list) A list of companies. + Args: + None + + Returns: + A list of companies. 
""" - conn = psycopg2.connect( - host="172.19.0.2", - database="stocks", - user="postgres", - password="123456", - port="5432") + conn: psycopg2.extensions.connection = psycopg2.connect(**self.params) cursor = conn.cursor() cursor.execute("SELECT * FROM companies;") records = cursor.fetchall() ticker_list: list = [] companies_list: list = [] for row in records: + ticker: str + company: str (_, ticker, company) = row company = company.replace("\xa0", " ") ticker_list.append(ticker) @@ -40,12 +54,16 @@ def generate_company_list() -> Tuple[list, list]: conn.close() return ticker_list, companies_list - def check_headers_and_data(self, file, expected_headers) -> bool: + def check_headers_and_data(self, file: str, expected_headers: list) -> bool: """ Checks if each csv file has the expected headers and at least one data point for each header - :param filename: (str) The name of the file being checked - :param expected_headers: (list) The list of headers required - :return: (bool) The results of the file + + Args: + file: The name of the file being checked + expected_headers: The list of headers required + + Returns: + The results of the file """ has_expected_headers: bool = False has_data: bool = False @@ -84,26 +102,30 @@ def process_data(self) -> Union[pd.DataFrame, str]: """ Slices the data as required. - :return: (DataFrame) A DataFrame containing required information of all companies. + Args: + None + + Returns: + A DataFrame containing required information of all companies. """ companies_list: Tuple[list, list] = self.generate_company_list() companies_data: dict = {} - conn: psycopg2.extensions.connection = psycopg2.connect( - database="stocks", user="postgres", password="123456" - ) - number_of_companies: int = len(companies_list[0]) - for company_idx in range(1, number_of_companies + 1): - query: str = f"SELECT trade_date, close FROM stock_prices_main \ - WHERE company_id = {company_idx} ORDER BY trade_date ASC;" - company_df: pd.DataFrame = pd.read_sql(query, conn) + conn: psycopg2.extensions.connection = psycopg2.connect(**self.params) + query: str = "SELECT company_id, trade_date, close FROM stock_prices_main \ + GROUP BY company_id, trade_date, close ORDER BY trade_date ASC;" + all_data: pd.DataFrame = pd.read_sql(query, conn) + grouped_data = all_data.groupby('company_id')[["trade_date", "close"]] + for company_id, group_data in grouped_data: + company_df: pd.DataFrame = group_data company_df["trade_date"] = pd.to_datetime(company_df["trade_date"]) company_df["trade_date"] = company_df["trade_date"].dt.strftime("%Y-%m-%d") company_df["close"] = pd.to_numeric(company_df["close"]) modified_data: dict = company_df.to_dict("list") - curr_company_ticker: list = companies_list[0][company_idx - 1] + assert isinstance(company_id, int) + curr_company_ticker: str = companies_list[0][int(company_id) - 1] companies_data[curr_company_ticker] = modified_data # Uncomment below for full company names in selection rather than ticker symbols. 
- # curr_company_name = companies_list[1][company_idx-1] + # curr_company_name = companies_list[1][company_id-1] # companies_data[curr_company_name] = modified_data all_companies_data: pd.DataFrame = pd.DataFrame(companies_data) conn.close() diff --git a/src/requirements.txt b/src/requirements.txt new file mode 100755 index 0000000..a291bde --- /dev/null +++ b/src/requirements.txt @@ -0,0 +1,50 @@ +appdirs==1.4.4 +asgiref==3.7.2 +beautifulsoup4==4.12.3 +certifi==2023.5.7 +charset-normalizer==3.1.0 +coverage==7.4.3 +Django==4.1.12 +django-admin-volt==1.0.10 +exceptiongroup==1.1.1 +Flask==3.0.2 +frozendict==2.4.0 +gunicorn==21.2.0 +html5lib==1.1 +idna==3.4 +iniconfig==2.0.0 +install==1.3.5 +jmespath==1.0.1 +lxml==4.9.3 +multitasking==0.0.11 +mypy==1.8.0 +mypy-extensions==1.0.0 +numpy==1.26.4 +packaging==23.1 +pandas==2.2.1 +pandas-stubs==2.2.0.240218 +peewee==3.17.1 +pluggy==1.0.0 +psycopg2-binary==2.9.9 +pytest==7.3.1 +pytest-cov==4.1.0 +python-dateutil==2.8.2 +python-dotenv==1.0.0 +pytz==2023.3 +requests==2.31.0 +ruff==0.3.0 +six==1.16.0 +soupsieve==2.5 +sqlparse==0.4.4 +tomli==2.0.1 +types-openpyxl==3.1.0.20240301 +types-psycopg2==2.9.21.20240311 +types-pytz==2024.1.0.20240203 +types-requests==2.31.0.20240218 +typing_extensions==4.8.0 +tzdata==2023.3 +urllib3==2.0.2 +utils==1.0.1 +webencodings==0.5.1 +whitenoise==6.5.0 +yfinance==0.2.37 diff --git a/streamlit/Dockerfile b/streamlit/Dockerfile new file mode 100644 index 0000000..94dd753 --- /dev/null +++ b/streamlit/Dockerfile @@ -0,0 +1,27 @@ +FROM python:3.10-slim + +# Set a directory for the app +WORKDIR /app + +# Copy all the files to the container +COPY . . + +# Install dependencies +RUN apt-get update && apt-get install -y \ + build-essential \ + curl \ + software-properties-common \ + git \ + && rm -rf /var/lib/apt/lists/* + +# Install python dependencies +RUN pip3 install -r requirements.txt + +# Expose port +EXPOSE 8501 + +# Check the health of app +HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health + +# Set the entrypoint +ENTRYPOINT ["streamlit", "run", "streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"] diff --git a/streamlit/entrypoint.sh b/streamlit/entrypoint.sh new file mode 100644 index 0000000..1bd0ee6 --- /dev/null +++ b/streamlit/entrypoint.sh @@ -0,0 +1,6 @@ +#!/bin/bash +mkdir /temp_data +chown -R postgres:postgres /temp_data +apt-get update +apt-get install -y nfs-common +python3 src/flask_endpoints.py diff --git a/streamlit/requirements.txt b/streamlit/requirements.txt new file mode 100755 index 0000000..bd70e41 --- /dev/null +++ b/streamlit/requirements.txt @@ -0,0 +1,42 @@ +appdirs==1.4.4 +asgiref==3.7.2 +beautifulsoup4==4.12.3 +certifi==2023.5.7 +charset-normalizer==3.1.0 +coverage==7.4.3 +exceptiongroup==1.1.1 +frozendict==2.4.0 +gunicorn==21.2.0 +html5lib==1.1 +idna==3.4 +iniconfig==2.0.0 +install==1.3.5 +jmespath==1.0.1 +lxml==4.9.3 +multitasking==0.0.11 +numpy==1.26.4 +packaging==23.1 +pandas==2.2.1 +pandas-stubs==2.2.0.240218 +peewee==3.17.1 +plotly==5.20.0 +pluggy==1.0.0 +python-dateutil==2.8.2 +python-dotenv==1.0.0 +pytz==2023.3 +requests==2.31.0 +ruff==0.3.0 +six==1.16.0 +soupsieve==2.5 +sqlparse==0.4.4 +streamlit==1.33.0 +tomli==2.0.1 +types-openpyxl==3.1.0.20240301 +types-pytz==2024.1.0.20240203 +types-requests==2.31.0.20240218 +typing_extensions==4.8.0 +tzdata==2023.3 +urllib3==2.0.2 +utils==1.0.1 +webencodings==0.5.1 +whitenoise==6.5.0 diff --git a/streamlit/streamlit_app.py b/streamlit/streamlit_app.py new file mode 100644 index 0000000..42bcb0d --- /dev/null +++ 
b/streamlit/streamlit_app.py @@ -0,0 +1,46 @@ +"""This module configures the streamlit web app.""" +import json +import requests +import plotly.express as px # type: ignore[import-untyped] # pylint: disable=E0401 +import pandas as pd +import streamlit as st # type: ignore[import-untyped] # pylint: disable=E0401 + + +company_list_response: requests.Response = requests.get( + "http://core-modules:5000/model/generate_company_list" + ) +company_list: list = company_list_response.json() +st.write("Hello, let's learn more about a company together!") +company = st.selectbox("Pick a company", [None] + company_list) +st.write("You selected:", company) + +if company: + payload: dict = {"company": company} + update_response: requests.Response = requests.post( + "http://core-modules:5000/update_data", json=payload + ) + price: float = update_response.json()["price"] + news: list = update_response.json()["news"] + st.sidebar.write(f"{company}'s most recent price: {price}") + + news_container = st.sidebar.container() + for article in news: + news_container.markdown(f"- [{article['title']}]({article['url']})") + + chart_data: dict = json.loads(update_response.json()["graph"]) + date: dict + close: dict + date, close = chart_data["date"].values(), chart_data["close"].values() + df = pd.DataFrame(close, date) + + fig = px.line(df) + + fig.update_layout( + title=company, + xaxis_title='Date', + yaxis_title='Close', + width=800, + height=600 + ) + + st.plotly_chart(fig)
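A closing note on the ```graph``` payload: ```update_graph``` in *flask_endpoints.py* serialises the per-company DataFrame with ```df.to_json()```, and *streamlit_app.py* above rebuilds it by hand via ```json.loads``` and ```.values()```. An equivalent approach is to let pandas parse the JSON itself. The sketch below is not the app's code — just an alternative under the assumption that the payload keeps the ```date``` and ```close``` columns produced by ```update_graph```.

```python
"""Sketch: rebuild the chart DataFrame from the "graph" field of /update_data."""
from io import StringIO

import pandas as pd


def graph_payload_to_frame(graph_json: str) -> pd.DataFrame:
    """Parse the DataFrame.to_json() string returned under the "graph" key."""
    frame = pd.read_json(StringIO(graph_json))  # columns: "date", "close"
    return frame.set_index("date")


# Example with a payload shaped like the one update_graph returns:
sample_payload = pd.DataFrame(
    {"date": ["2024-01-02", "2024-01-03"], "close": [185.64, 184.25]}
).to_json()
print(graph_payload_to_frame(sample_payload))
```

Either way, the contract between the two containers is simply the JSON string produced by ```DataFrame.to_json()```, so the web app is free to reshape it however best suits the chart.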