Skip to content

Commit

Permalink
First attempt at deploying Streamlit
Browse files Browse the repository at this point in the history
  • Loading branch information
wleong1 committed Apr 8, 2024
1 parent d163dee commit 57425d6
Show file tree
Hide file tree
Showing 24 changed files with 39 additions and 710 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

# Personal files
parameters.py
*.toml

# Python runtime
*.pyc
Expand Down
18 changes: 0 additions & 18 deletions Dockerfile

This file was deleted.

15 changes: 0 additions & 15 deletions docs/explanation.md

This file was deleted.

6 changes: 0 additions & 6 deletions docs/how-to-guides.md

This file was deleted.

30 changes: 0 additions & 30 deletions docs/index.md

This file was deleted.

8 changes: 0 additions & 8 deletions docs/reference.md

This file was deleted.

16 changes: 0 additions & 16 deletions docs/tutorials.md

This file was deleted.

File renamed without changes.
37 changes: 14 additions & 23 deletions src/model.py → model.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@

from typing import Union, Tuple
import warnings
import psycopg2 # pylint: disable=E0401
import pandas as pd
import streamlit as st

warnings.filterwarnings("ignore")

Expand All @@ -21,23 +21,15 @@ def generate_company_list() -> Tuple[list, list]:
:return: (list) A list of companies.
"""
conn = psycopg2.connect(
host="stocks-postgres",
database="stocks",
user="postgres",
password="123456",
port="5432")
cursor = conn.cursor()
cursor.execute("SELECT * FROM companies;")
records = cursor.fetchall()
conn = st.connection("postgresql", type="sql")
records = conn.query("SELECT * FROM companies;", ttl="10")
ticker_list: list = []
companies_list: list = []
for row in records:
(_, ticker, company) = row
for row in records.itertuples():
ticker, company = row.ticker, row.company_name
company = company.replace("\xa0", " ")
ticker_list.append(ticker)
companies_list.append(company)
conn.close()
return ticker_list, companies_list

def check_headers_and_data(self, file, expected_headers) -> bool:
Expand Down Expand Up @@ -88,18 +80,12 @@ def process_data(self) -> Union[pd.DataFrame, str]:
"""
companies_list: Tuple[list, list] = self.generate_company_list()
companies_data: dict = {}
conn: psycopg2.extensions.connection = psycopg2.connect(
host="stocks-postgres",
database="stocks",
user="postgres",
password="123456",
port="5432"
)
conn = st.connection("postgresql", type="sql")
number_of_companies: int = len(companies_list[0])
for company_idx in range(1, number_of_companies + 1):
query: str = f"SELECT trade_date, close FROM stock_prices_main \
query: str = f"SELECT trade_date, close FROM stock_prices \
WHERE company_id = {company_idx} ORDER BY trade_date ASC;"
company_df: pd.DataFrame = pd.read_sql(query, conn)
company_df: pd.DataFrame = conn.query(query, ttl="10m")
company_df["trade_date"] = pd.to_datetime(company_df["trade_date"])
company_df["trade_date"] = company_df["trade_date"].dt.strftime("%Y-%m-%d")
company_df["close"] = pd.to_numeric(company_df["close"])
Expand All @@ -110,5 +96,10 @@ def process_data(self) -> Union[pd.DataFrame, str]:
# curr_company_name = companies_list[1][company_idx-1]
# companies_data[curr_company_name] = modified_data
all_companies_data: pd.DataFrame = pd.DataFrame(companies_data)
conn.close()
return all_companies_data

# a = Model()
# print(a.generate_company_list())
# a = Model()
# data = a.process_data()
# print(data.keys())
File renamed without changes.
28 changes: 0 additions & 28 deletions src/Dockerfile

This file was deleted.

Empty file removed src/__init__.py
Empty file.
3 changes: 0 additions & 3 deletions src/entrypoint.sh

This file was deleted.

105 changes: 0 additions & 105 deletions src/flask_endpoints.py

This file was deleted.

3 changes: 0 additions & 3 deletions src/parameters.py

This file was deleted.

49 changes: 0 additions & 49 deletions src/requirements.txt

This file was deleted.

Loading

0 comments on commit 57425d6

Please sign in to comment.