diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..07761fb --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,76 @@ +name: ci +on: + push: + branches: + - '*' +permissions: + contents: write +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Configure Git Credentials + run: | + git config user.name github-actions[bot] + git config user.email 41898282+github-actions[bot]@users.noreply.github.com + - uses: actions/setup-python@v5 + with: + python-version: 3.x + - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV + - uses: actions/cache@v4 + with: + key: mkdocs-material-${{ env.cache_id }} + path: .cache + restore-keys: | + mkdocs-material- + - run: pip install mkdocs "mkdocstrings[python]" mkdocs-material + - run: mkdocs gh-deploy --force + + mypy: + runs-on: ubuntu-latest + name: Mypy + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.x + uses: actions/setup-python@v2 + with: + python-version: 3.x + - name: Install Dependencies + run: | + pip install mypy requests types-requests pandas-stubs + - name: mypy + run: | + mypy src/ + + pylint: + runs-on: ubuntu-latest + name: pylint + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.x + uses: actions/setup-python@v2 + with: + python-version: 3.x + - name: Install Dependencies + run: | + pip install pylint requests pandas + - name: pylint + run: | + pylint src/ + + pytest: + runs-on: ubuntu-latest + name: pytest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.x + uses: actions/setup-python@v2 + with: + python-version: 3.x + - name: Install Dependencies + run: | + pip install pytest requests pandas yfinance + - name: pytest + run: | + pytest tests/ \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..5166d7f --- /dev/null +++ b/Dockerfile @@ -0,0 +1,18 @@ +FROM ubuntu:22.04 + +# set a directory for the app +WORKDIR /usr/workspace/ + +# copy all the files to the container +COPY . . + +# install dependencies +RUN apt-get update +RUN apt-get install -y python3 +RUN apt install -y python3-pip +RUN pip install --no-cache-dir -r requirements.txt + +# EXPOSE 8000 + +# CMD cd django-stock-tracker && python3 manage.py runserver 0.0.0.0:8000 Can run this in the terminal +# but need to specify docker run -it -p 8888:8000 stock \ No newline at end of file diff --git a/django-stock-tracker/CHANGELOG.md b/django-stock-tracker/CHANGELOG.md new file mode 100644 index 0000000..8e08c01 --- /dev/null +++ b/django-stock-tracker/CHANGELOG.md @@ -0,0 +1,127 @@ +# Change Log + +## [1.0.14] 2023-02-14 +### Changes + +- Update [Custom Development](https://appseed.us/custom-development/) Section +- Minor Changes (readme) + +## [1.0.13] 2023-10-21 +### Changes + +- Update Dependencies +- Update README + +## [1.0.12] 2023-03-01 +### Changes + +- Bump design version: + - [Django Admin Volt](https://github.com/app-generator/django-admin-volt) `v1.0.10` + +## [1.0.11] 2023-01-31 +### Changes + +- DOCS Update (readme) +- Bump design version: + - [Django Admin Volt](https://github.com/app-generator/django-admin-volt) `v1.0.9` + +## [1.0.10] 2023-01-28 +### Changes + +- DOCS Update (readme). 
New sections: + - `How to customize the theme` + - Render deployment +- Configure the project to use `home/templates` +- Added `custom_footer` sample + +## [1.0.9] 2023-01-07 +### Changes + +- Move to a `theme-based` pattern + - [Django Admin Volt](https://github.com/app-generator/django-admin-volt) +- 🚀 `Deployment` + - `CI/CD` flow via `Render` + +## [1.0.8] 2022-09-14 +### Improvements + +- Added **Github OAuth** via AllAuth. requires in `.env`: + - `GITHUB_ID`= + - `GITHUB_SECRET`= +- `Docker` Improvements +- Pages UX Updates: + - `SignIN`, `SignUP`, `Settings` + +## [1.0.7] 2022-05-25 +### Improvements + +- Built with [Volt Dashboard Generator](https://appseed.us/generator/volt-dashboard/) + - Timestamp: `2022-05-25 22:38` +- Codebase refactoring +- Added CDN Support + - via `.env` **ASSETS_ROOT** + +## [1.0.6] 2022-01-16 +### Improvements + +- Bump Django Codebase to [v2stable.0.1](https://github.com/app-generator/boilerplate-code-django-dashboard/releases) +- Dependencies update (all packages) + - Django==4.0.1 +- Settings update for Django 4.x + - `New Parameter`: CSRF_TRUSTED_ORIGINS + - [Origin header checking isn`t performed in older versions](https://docs.djangoproject.com/en/4.0/ref/settings/#csrf-trusted-origins) + +## [1.0.5] 2021-10-06 +### Improvements + +- Mention `admin` section custom theme + - [Django Admin Volt](https://pypi.org/project/django-admin-volt/) available on PyPi and Github +- `Presentation` page + - Update Top links + +## [1.0.4] 2021-09-15 +### Improvements + +- Bump Django Codebase to [v2.0.4](https://github.com/app-generator/boilerplate-code-django-dashboard/releases) + - Codebase update + - `assets` & `templates` moved to `apps` folder + - `apps/base` renamed to `apps/home` + +## [1.0.3] 2021-09-07 +### Improvements & Fixes + +- Bump Django Codebase to [v2.0.2](https://github.com/app-generator/boilerplate-code-django-dashboard/releases) + - Dependencies update (all packages) + - Use Django==3.2.6 (latest stable version) + - Better Code formatting + - Improved Files organization + - Optimize imports + - Docker Scripts Update +- Fixes: + - Patch 500 Error when authenticated users access `admin` path (no slash at the end) + - Patch [#16](https://github.com/app-generator/boilerplate-code-django-dashboard/issues/16): Minor issue in Docker + +## [1.0.2] 2021-08-27 +### Improvements + +- Bump UI - [Volt Dashboard v1.4.1](https://github.com/themesberg/volt-bootstrap-5-dashboard/releases) + +## Unreleased 2021-08-05 +### Tooling + +- Added scripts to recompile the SCSS files + - `core/static/assets/` - gulpfile.js + - `core/static/assets/` - package.json +- `Update README` - [Recompile SCSS](https://github.com/app-generator/django-dashboard-volt#recompile-css) (new section) + +## [1.0.1] 2021-03-30 +### Improvements + +- Bump UI: [Jinja Volt](https://github.com/app-generator/jinja-volt-dashboard/releases) v1.0.1 +- [Volt Dashboard](https://github.com/themesberg/volt-bootstrap-5-dashboard/releases) v1.3.2 + +## [1.0.0] 2021-01-17 + +- Bump UI: [Jinja Volt](https://github.com/app-generator/jinja-volt-dashboard/releases) v1.0.0 +- [Volt Dashboard](https://github.com/themesberg/volt-bootstrap-5-dashboard/releases/tag) v1.2.0 +- Codebase: [Django Dashboard](https://github.com/app-generator/boilerplate-code-django-dashboard/releases) v1.0.4 diff --git a/django-stock-tracker/Dockerfile b/django-stock-tracker/Dockerfile new file mode 100644 index 0000000..253fafb --- /dev/null +++ b/django-stock-tracker/Dockerfile @@ -0,0 +1,18 @@ +FROM python:3.9 + +# set environment 
variables +ENV PYTHONDONTWRITEBYTECODE 1 +ENV PYTHONUNBUFFERED 1 + +COPY requirements.txt . +# install python dependencies +RUN pip install --upgrade pip +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +# running migrations +RUN python manage.py migrate + +# gunicorn +CMD ["gunicorn", "--config", "gunicorn-cfg.py", "core.wsgi"] diff --git a/django-stock-tracker/LICENSE.md b/django-stock-tracker/LICENSE.md new file mode 100644 index 0000000..5012dd9 --- /dev/null +++ b/django-stock-tracker/LICENSE.md @@ -0,0 +1,32 @@ +# MIT License + +Copyright (c) 2019 - present [AppSeed](http://appseed.us/) + +
+ +## Licensing Information + +
+ +| Item | - | +| ---------------------------------- | --- | +| License Type | MIT | +| Use for print | **YES** | +| Create single personal website/app | **YES** | +| Create single website/app for client | **YES** | +| Create multiple website/apps for clients | **YES** | +| Create multiple SaaS applications | **YES** | +| End-product paying users | **YES** | +| Product sale | **YES** | +| Remove footer credits | **YES** | +| --- | --- | +| Remove copyright mentions from source code | NO | +| Production deployment assistance | NO | +| Create HTML/CSS template for sale | NO | +| Create Theme/Template for CMS for sale | NO | +| Separate sale of our UI Elements | NO | + +
+ +--- +For more information regarding licensing, please contact the AppSeed Service < *support@appseed.us* > diff --git a/django-stock-tracker/README.md b/django-stock-tracker/README.md new file mode 100644 index 0000000..b0a38f7 --- /dev/null +++ b/django-stock-tracker/README.md @@ -0,0 +1,202 @@ +# [Volt Dashboard Django](https://appseed.us/product/volt-dashboard/django/) + +Open-source **[Django Dashboards](https://appseed.us/admin-dashboards/django/)** built on top of a modern design. **Volt Dashboard** is a free and open source `Bootstrap 5` Admin Dashboard featuring over 100 components, 11 example pages and 3 plugins with Vanilla JS. There are more than 100 free Bootstrap 5 components included some of them being buttons, alerts, modals, and date pickers. + +- 👉 [Volt Dashboard Django](https://appseed.us/product/volt-dashboard/django/) - Product page +- 👉 [Volt Dashboard Django](https://django-volt-dashboard.appseed-srv1.com/) - LIVE Demo + +
+ +## Features + +> `Have questions?` Contact **[Support](https://appseed.us/support/)** (Email & Discord) provided by **AppSeed** + +| Free Version | [PRO Version](https://appseed.us/product/volt-dashboard-pro/django/) | 🚀 Custom - $5,999 | +| --------------------------------------| --------------------------------------| --------------------------------------| +| ✓ **Django 4.1.12** | **Everything in Free**, plus: | **Everything in PRO**, plus: | +| ✓ Best Practices | ✅ **Premium Bootstrap 5 Design** | ✅ **1mo Custom Development** | +| ✓ `Bootstrap 5`, Volt Dashboard UI | ✅ `OAuth` Google, GitHub | ✅ **Team**: PM, Developer, Tester | +| ✓ `Docker` | ✅ Extended User Model | ✅ Weekly Sprints | +| ✓ `CI/CD` Flow via Render | ✅ `Private REPO Access` | ✅ Technical SPECS | +| ✓ `Free Support` | ✅ **PRO Support** - [Email & Discord](https://appseed.us/support/) | ✅ Documentation | +| - | ✅ Deployment Assistance | ✅ **30 days Delivery Warranty** | +| ------------------------------------ | ------------------------------------ | ------------------------------------| +| ✓ [LIVE Demo](https://django-volt-dashboard.appseed-srv1.com/) | 🚀 [LIVE Demo](https://django-volt-enh.appseed-srv1.com/) | 🛒 `Order`: **[$5,999](https://appseed.gumroad.com/l/rocket-package)** (GUMROAD) | + + +![Volt Dashboard - Full-Stack Starter generated by AppSeed.](https://user-images.githubusercontent.com/51070104/168843604-b026fd94-5969-4be7-81ac-5887cf0958e5.png) + +
+ +## Manual Build + +> 👉 Download the code + +```bash +$ git clone https://github.com/app-generator/django-volt-dashboard.git +$ cd django-volt-dashboard +``` + +
+ +> 👉 Install modules via `VENV` + +```bash +$ virtualenv env +$ source env/bin/activate +$ pip install -r requirements.txt +``` + +
+ +> 👉 Set Up Database + +```bash +$ python manage.py makemigrations +$ python manage.py migrate +``` + +
+ +> 👉 Create the Superuser + +```bash +$ python manage.py createsuperuser +``` + +
+ +> 👉 Start the app + +```bash +$ python manage.py runserver +``` + +At this point, the app runs at `http://127.0.0.1:8000/`. + +
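Once the dev server is running, a quick request against the root URL is an easy way to confirm the starter is wired up. A minimal smoke check, assuming the `runserver` process from the previous step is still up on the default port:

```python
# Minimal smoke check against the local dev server started above
# (assumes `python manage.py runserver` is still running on 127.0.0.1:8000).
from urllib.request import urlopen

with urlopen("http://127.0.0.1:8000/") as resp:
    print(resp.status)        # expect 200 once the index page renders
    print(len(resp.read()))   # non-empty body: the rendered dashboard HTML
```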
+ +## Codebase structure + +The project is coded using a simple and intuitive structure presented below: + +```bash +< PROJECT ROOT > + | + |-- core/ + | |-- settings.py # Project Configuration + | |-- urls.py # Project Routing + | + |-- home/ + | |-- views.py # APP Views + | |-- urls.py # APP Routing + | |-- models.py # APP Models + | |-- tests.py # Tests + | |-- templates/ # Theme Customisation + | |-- includes # + | |-- custom-footer.py # Custom Footer + | + |-- requirements.txt # Project Dependencies + | + |-- env.sample # ENV Configuration (default values) + |-- manage.py # Start the app - Django default start script + | + |-- ************************************************************************ +``` + +
+ +## How to Customize + +When a template file is loaded in the controller, `Django` scans all template directories starting from the ones defined by the user, and returns the first match or an error in case the template is not found. +The theme used to style this starter provides the following files: + +```bash +# This exists in ENV: LIB/admin_volt +< UI_LIBRARY_ROOT > + | + |-- templates/ # Root Templates Folder + | | + | |-- accounts/ + | | |-- sign-in.html # Sign IN Page + | | |-- sign-up.html # Sign UP Page + | | + | |-- includes/ + | | |-- footer.html # Footer component + | | |-- sidebar.html # Sidebar component + | | |-- navigation.html # Navigation Bar + | | |-- scripts.html # Scripts Component + | | + | |-- layouts/ + | | |-- base.html # Masterpage + | | |-- base-auth.html # Masterpage for Auth Pages + | | + | |-- pages/ + | |-- index.html # Index Page (presentation) + | |-- settings.html # Settings Page + | |-- dashboard.html # Dashboard page + | |-- *.html # All other pages + | + |-- ************************************************************************ +``` + +When the project requires customization, we need to copy the original file that needs an update (from the virtual environment) and place it in the template folder using the same path. + +> For instance, if we want to **customize the footer.html** these are the steps: + +- ✅ `Step 1`: create the `templates` DIRECTORY inside the `home` app +- ✅ `Step 2`: configure the project to use this new template directory + - `core/settings.py` TEMPLATES section +- ✅ `Step 3`: copy the `footer.html` from the original location (inside your ENV) and save it to the `home/templates` DIR + - Source PATH: `/LIB/admin_volt/includes/footer.html` + - Destination PATH: `home/templates/includes/footer.html` + +> To speed up all these steps, the **codebase is already configured** (`Steps 1, and 2`) and a `custom footer` can be found at this location: + +`home/templates/includes/custom_footer.html` + +By default, this file is unused because the `theme` expects `footer.html` (without the `custom-` prefix). + +In order to use it, simply rename it to `footer.html`. Like this, the default version shipped in the library is ignored by Django. + +In a similar way, all other files and components can be customized easily. + +
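Step 2 above relies on `core/settings.py` registering `home/templates` as a template directory. An abridged sketch of the relevant `TEMPLATES` block (mirroring the settings added later in this diff) makes the override order clear:

```python
# core/settings.py (abridged sketch): directories listed in DIRS are searched
# before the theme's own templates, so home/templates/includes/footer.html
# overrides the footer.html shipped with django-admin-volt.
import os
from pathlib import Path

BASE_DIR = Path(__file__).resolve().parent.parent
HOME_TEMPLATES = os.path.join(BASE_DIR, 'home', 'templates')

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [HOME_TEMPLATES],    # user-defined directories, scanned first
        "APP_DIRS": True,            # then app/theme templates
        "OPTIONS": {"context_processors": []},  # trimmed here; see settings.py
    },
]
```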
+
+## Deploy on [Render](https://render.com/)
+
+- Create a Blueprint instance
+ - Go to https://dashboard.render.com/blueprints.
+- Click the `New Blueprint Instance` button.
+- Connect the `repo` you want to deploy.
+- Fill in the `Service Group Name` and click the `Update Existing Resources` button.
+- After that, the deployment starts automatically.
+
+At this point, the product should be LIVE.
+
+
+## [PRO Version](https://appseed.us/product/volt-dashboard-pro/django/)
+
+This is a pixel-perfect [Bootstrap 5](https://www.admin-dashboards.com/bootstrap-5-templates/) Dashboard with a fresh design inspired by Google's Material Design. `Volt Dashboard PRO` is built with over 300 individual frontend elements, like buttons, inputs, navbars, nav tabs, cards, and alerts, giving you the freedom to choose and combine.
+
+> Features:
+
+- ✅ `Up-to-date Dependencies`
+- ✅ `Design`: [Django Theme Volt](https://github.com/app-generator/django-admin-volt-pro) - `PRO Version`
+- ✅ `Sections` covered by the design:
+  - ✅ **Admin section** (reserved for superusers)
+  - ✅ **Authentication**: `Django.contrib.AUTH`, Registration
+  - ✅ **All Pages** available to ordinary users
+- ✅ `Docker`
+- 🚀 `Deployment`
+  - `CI/CD` flow via `Render`
+
+ +![Volt Dashboard PRO - Starter generated by AppSeed.](https://user-images.githubusercontent.com/51070104/172672843-8c40a801-3438-4e9c-86db-38a34191fbdf.png) + +
+
+---
+[Django Volt Dashboard](https://appseed.us/product/volt-dashboard/django/) - **Django** starter provided by **[AppSeed](https://appseed.us/)**
diff --git a/django-stock-tracker/README_deploy.md b/django-stock-tracker/README_deploy.md
new file mode 100644
index 0000000..a1ac7e5
--- /dev/null
+++ b/django-stock-tracker/README_deploy.md
@@ -0,0 +1,24 @@
+# How to deploy on `Render`
+
+> This document contains all the steps needed to deploy the app on Render using PostgreSQL, with minimal effort.
+
+https://render.com/docs/deploy-django
+
+## ALL STEPS below
+
+
+### 👉 Create a `PostgreSQL` database on Render
+ - Go to https://dashboard.render.com/new/database.
+ - The database name should be `berry`.
+ - Keep the Database, User, and Datadog API Key fields as they are.
+ - If you change the database name to anything else, update the database name in your `render.yaml` file as well.
+
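The credentials of this database feed the env-driven `DATABASES` block in `core/settings.py` (via `DB_ENGINE`, `DB_NAME`, `DB_USERNAME`, and friends). A sketch of what that block resolves to for the `berry` instance; the host, user, and password values below are placeholders taken from the Render dashboard, and `psycopg2-binary` (commented out in `requirements.txt`) must be installed:

```python
# Sketch only: the configuration core/settings.py builds when the Render
# Postgres credentials are exported as environment variables, e.g.
# DB_ENGINE=postgresql  DB_NAME=berry  DB_USERNAME=...  DB_PASS=...  DB_HOST=...  DB_PORT=5432
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',  # 'django.db.backends.' + DB_ENGINE
        'NAME': 'berry',
        'USER': '<render-db-user>',                 # placeholder
        'PASSWORD': '<render-db-password>',         # placeholder
        'HOST': '<render-db-hostname>',             # placeholder
        'PORT': '5432',
    },
}
```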
+ +### 👉 Create a Blueprint instance + - Go to https://dashboard.render.com/blueprints this link. + - Click `New Blueprint Instance` button. + - Connect your `repo` which you want to deploy. + - Fill the `Service Group Name` and click on `Update Existing Resources` button. + - After that your deployment will start automatically. \ No newline at end of file diff --git a/django-stock-tracker/build.sh b/django-stock-tracker/build.sh new file mode 100644 index 0000000..475c139 --- /dev/null +++ b/django-stock-tracker/build.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash +# exit on error +set -o errexit + +python -m pip install --upgrade pip + +pip install -r requirements.txt + +python manage.py collectstatic --no-input +python manage.py migrate diff --git a/models/forecasting-algorithms/__init__.py b/django-stock-tracker/core/__init__.py similarity index 100% rename from models/forecasting-algorithms/__init__.py rename to django-stock-tracker/core/__init__.py diff --git a/django-stock-tracker/core/asgi.py b/django-stock-tracker/core/asgi.py new file mode 100644 index 0000000..26a5940 --- /dev/null +++ b/django-stock-tracker/core/asgi.py @@ -0,0 +1,16 @@ +""" +ASGI config for core project. + +It exposes the ASGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/4.1/howto/deployment/asgi/ +""" + +import os + +from django.core.asgi import get_asgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings") + +application = get_asgi_application() diff --git a/django-stock-tracker/core/settings.py b/django-stock-tracker/core/settings.py new file mode 100644 index 0000000..0a2ce3b --- /dev/null +++ b/django-stock-tracker/core/settings.py @@ -0,0 +1,165 @@ +""" +Django settings for core project. + +Generated by 'django-admin startproject' using Django 4.1.2. + +For more information on this file, see +https://docs.djangoproject.com/en/4.1/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/4.1/ref/settings/ +""" + +import os, random, string +from pathlib import Path +from dotenv import load_dotenv + +load_dotenv() # take environment variables from .env. + +# Build paths inside the project like this: BASE_DIR / 'subdir'. +BASE_DIR = Path(__file__).resolve().parent.parent + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/4.1/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! 
+SECRET_KEY = os.environ.get('SECRET_KEY') +if not SECRET_KEY: + SECRET_KEY = ''.join(random.choice( string.ascii_lowercase ) for i in range( 32 )) + +# Render Deployment Code +DEBUG = 'RENDER' not in os.environ + +ALLOWED_HOSTS = [] + +RENDER_EXTERNAL_HOSTNAME = os.environ.get('RENDER_EXTERNAL_HOSTNAME') +if RENDER_EXTERNAL_HOSTNAME: + ALLOWED_HOSTS.append(RENDER_EXTERNAL_HOSTNAME) + +# Application definition + +INSTALLED_APPS = [ + 'admin_volt.apps.AdminVoltConfig', + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + + "home", +] + +MIDDLEWARE = [ + "django.middleware.security.SecurityMiddleware", + "whitenoise.middleware.WhiteNoiseMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", +] + +ROOT_URLCONF = "core.urls" + +HOME_TEMPLATES = os.path.join(BASE_DIR, 'home', 'templates') + +TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [HOME_TEMPLATES], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + }, + }, +] + +WSGI_APPLICATION = "core.wsgi.application" + + +# Database +# https://docs.djangoproject.com/en/4.1/ref/settings/#databases + +DB_ENGINE = os.getenv('DB_ENGINE' , None) +DB_USERNAME = os.getenv('DB_USERNAME' , None) +DB_PASS = os.getenv('DB_PASS' , None) +DB_HOST = os.getenv('DB_HOST' , None) +DB_PORT = os.getenv('DB_PORT' , None) +DB_NAME = os.getenv('DB_NAME' , None) + +if DB_ENGINE and DB_NAME and DB_USERNAME: + DATABASES = { + 'default': { + 'ENGINE' : 'django.db.backends.' 
+ DB_ENGINE, + 'NAME' : DB_NAME, + 'USER' : DB_USERNAME, + 'PASSWORD': DB_PASS, + 'HOST' : DB_HOST, + 'PORT' : DB_PORT, + }, + } +else: + DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': 'db.sqlite3', + } + } + +# Password validation +# https://docs.djangoproject.com/en/4.1/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", + }, + { + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", + }, + { + "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", + }, + { + "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", + }, +] + + +# Internationalization +# https://docs.djangoproject.com/en/4.1/topics/i18n/ + +LANGUAGE_CODE = "en-us" + +TIME_ZONE = "UTC" + +USE_I18N = True + +USE_TZ = True + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/4.1/howto/static-files/ + +STATIC_URL = '/static/' +# STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles') +STATICFILES_DIRS = [ + BASE_DIR / 'static', +] + +#if not DEBUG: +# STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' + +# Default primary key field type +# https://docs.djangoproject.com/en/4.1/ref/settings/#default-auto-field + +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" + +LOGIN_REDIRECT_URL = '/' +EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' diff --git a/django-stock-tracker/core/urls.py b/django-stock-tracker/core/urls.py new file mode 100644 index 0000000..202d1bd --- /dev/null +++ b/django-stock-tracker/core/urls.py @@ -0,0 +1,23 @@ +"""core URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/4.1/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" +from django.contrib import admin +from django.urls import include, path + +urlpatterns = [ + path('', include('home.urls')), + path("admin/", admin.site.urls), + path("", include('admin_volt.urls')) +] diff --git a/django-stock-tracker/core/wsgi.py b/django-stock-tracker/core/wsgi.py new file mode 100644 index 0000000..e9b9d8a --- /dev/null +++ b/django-stock-tracker/core/wsgi.py @@ -0,0 +1,16 @@ +""" +WSGI config for core project. + +It exposes the WSGI callable as a module-level variable named ``application``. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/4.1/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings") + +application = get_wsgi_application() diff --git a/django-stock-tracker/db.sqlite3 b/django-stock-tracker/db.sqlite3 new file mode 100644 index 0000000..76c5788 Binary files /dev/null and b/django-stock-tracker/db.sqlite3 differ diff --git a/django-stock-tracker/docker-compose.yml b/django-stock-tracker/docker-compose.yml new file mode 100644 index 0000000..d8a7514 --- /dev/null +++ b/django-stock-tracker/docker-compose.yml @@ -0,0 +1,27 @@ +version: '3.8' +services: + appseed-app: + container_name: appseed_app + restart: always + build: . + networks: + - db_network + - web_network + nginx: + container_name: nginx + restart: always + image: "nginx:latest" + ports: + - "5085:5085" + volumes: + - ./nginx:/etc/nginx/conf.d + networks: + - web_network + depends_on: + - appseed-app +networks: + db_network: + driver: bridge + web_network: + driver: bridge + \ No newline at end of file diff --git a/django-stock-tracker/env.sample b/django-stock-tracker/env.sample new file mode 100644 index 0000000..731756c --- /dev/null +++ b/django-stock-tracker/env.sample @@ -0,0 +1,11 @@ +# True for development, False for production +DEBUG=True + +SECRET_KEY= + +# DB_ENGINE=mysql +# DB_HOST=localhost +# DB_NAME=appseed_db +# DB_USERNAME=appseed_db_usr +# DB_PASS=pass +# DB_PORT=3306 \ No newline at end of file diff --git a/django-stock-tracker/gunicorn-cfg.py b/django-stock-tracker/gunicorn-cfg.py new file mode 100644 index 0000000..3acce75 --- /dev/null +++ b/django-stock-tracker/gunicorn-cfg.py @@ -0,0 +1,11 @@ +# -*- encoding: utf-8 -*- +""" +Copyright (c) 2019 - present AppSeed.us +""" + +bind = '0.0.0.0:5005' +workers = 1 +accesslog = '-' +loglevel = 'debug' +capture_output = True +enable_stdio_inheritance = True diff --git a/tests/sample_data/CompanyD_data.txt b/django-stock-tracker/home/__init__.py similarity index 100% rename from tests/sample_data/CompanyD_data.txt rename to django-stock-tracker/home/__init__.py diff --git a/django-stock-tracker/home/admin.py b/django-stock-tracker/home/admin.py new file mode 100644 index 0000000..8c38f3f --- /dev/null +++ b/django-stock-tracker/home/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. diff --git a/django-stock-tracker/home/apps.py b/django-stock-tracker/home/apps.py new file mode 100644 index 0000000..e7d1c7e --- /dev/null +++ b/django-stock-tracker/home/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class HomeConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "home" diff --git a/tests/sample_data/CompanyE_data.xml b/django-stock-tracker/home/migrations/__init__.py similarity index 100% rename from tests/sample_data/CompanyE_data.xml rename to django-stock-tracker/home/migrations/__init__.py diff --git a/django-stock-tracker/home/models.py b/django-stock-tracker/home/models.py new file mode 100644 index 0000000..71a8362 --- /dev/null +++ b/django-stock-tracker/home/models.py @@ -0,0 +1,3 @@ +from django.db import models + +# Create your models here. 
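Note that `env.sample` above leaves `SECRET_KEY` empty, and `core/settings.py` falls back to a random 32-character key whenever it is unset, so sessions and signed cookies are invalidated on every process start. For anything beyond a quick local run it is worth generating a key once and putting it in `.env`; a minimal sketch using Django's own helper:

```python
# One-off helper (sketch): print a SECRET_KEY line suitable for the .env file,
# instead of relying on the per-process random fallback in core/settings.py.
from django.core.management.utils import get_random_secret_key

print(f"SECRET_KEY={get_random_secret_key()}")
```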
diff --git a/django-stock-tracker/home/templates/base.html b/django-stock-tracker/home/templates/base.html new file mode 100644 index 0000000..d5e1fcc --- /dev/null +++ b/django-stock-tracker/home/templates/base.html @@ -0,0 +1,54 @@ + + + + + + {% include 'includes/head.html' %} + {% block extrastyle %}{% endblock extrastyle %} + {% block extrahead %}{% endblock extrahead %} + + + {% block nav %} + {% include 'includes/nav.html' %} + {% endblock nav %} + + {% block sidebar %} + {% include 'includes/sidebar.html' %} + {% endblock sidebar %} + +
+ {% block header %} + {% include 'includes/navigation.html' %} + {% endblock header %} + + {% block content %}{% endblock content %} + + {% block theme_settings %} + {% include 'includes/theme-settings.html' %} + {% endblock theme_settings %} + + {% block footer %} + {% include 'includes/footer.html' %} + {% endblock footer %} + +
+ + {% include 'includes/scripts.html' %} + {% block extra_js %}{% endblock extra_js %} + \ No newline at end of file diff --git a/django-stock-tracker/home/templates/charts.html b/django-stock-tracker/home/templates/charts.html new file mode 100644 index 0000000..069677f --- /dev/null +++ b/django-stock-tracker/home/templates/charts.html @@ -0,0 +1,41 @@ + + + + + Chart.js Example + + + + + + + + diff --git a/django-stock-tracker/home/templates/custom-footer.html b/django-stock-tracker/home/templates/custom-footer.html new file mode 100644 index 0000000..4e8cf23 --- /dev/null +++ b/django-stock-tracker/home/templates/custom-footer.html @@ -0,0 +1,23 @@ + \ No newline at end of file diff --git a/django-stock-tracker/home/templates/dashboard.html b/django-stock-tracker/home/templates/dashboard.html new file mode 100644 index 0000000..4da0d7c --- /dev/null +++ b/django-stock-tracker/home/templates/dashboard.html @@ -0,0 +1,235 @@ +{% extends 'base.html' %} +{% load static %} + +{% block content %} + +
[Markup stripped in this extract. The dashboard page body renders two summary cards: a "Current price" card (placeholder value 0, period "Feb 1 - Apr 1", tag "USA", "Since last month 22%") and a "Current News" card (period "Feb 1 - Apr 1", tag "GER", "Since last month 2%").]
+ +{% endblock content %} diff --git a/django-stock-tracker/home/templates/includes/footer.html b/django-stock-tracker/home/templates/includes/footer.html new file mode 100644 index 0000000..edc3559 --- /dev/null +++ b/django-stock-tracker/home/templates/includes/footer.html @@ -0,0 +1,9 @@ +
[Markup stripped in this extract. The footer renders the text "Stock Tracker - yoinked from AppSeed."]
\ No newline at end of file diff --git a/django-stock-tracker/home/templates/includes/head.html b/django-stock-tracker/home/templates/includes/head.html new file mode 100644 index 0000000..9ce1b8f --- /dev/null +++ b/django-stock-tracker/home/templates/includes/head.html @@ -0,0 +1,45 @@ +{% load static %} + + + +Volt Free Bootstrap Dashboard - Settings + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/django-stock-tracker/home/templates/includes/nav.html b/django-stock-tracker/home/templates/includes/nav.html new file mode 100644 index 0000000..84eb1de --- /dev/null +++ b/django-stock-tracker/home/templates/includes/nav.html @@ -0,0 +1,14 @@ +{% load static %} + + \ No newline at end of file diff --git a/django-stock-tracker/home/templates/includes/navigation.html b/django-stock-tracker/home/templates/includes/navigation.html new file mode 100644 index 0000000..80878c6 --- /dev/null +++ b/django-stock-tracker/home/templates/includes/navigation.html @@ -0,0 +1,241 @@ +{% load static %} + + \ No newline at end of file diff --git a/django-stock-tracker/home/templates/includes/scripts.html b/django-stock-tracker/home/templates/includes/scripts.html new file mode 100644 index 0000000..4ee5e71 --- /dev/null +++ b/django-stock-tracker/home/templates/includes/scripts.html @@ -0,0 +1,42 @@ +{% load static %} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/django-stock-tracker/home/templates/includes/sidebar.html b/django-stock-tracker/home/templates/includes/sidebar.html new file mode 100644 index 0000000..1682ee1 --- /dev/null +++ b/django-stock-tracker/home/templates/includes/sidebar.html @@ -0,0 +1,358 @@ +{% load i18n static admin_volt %} + + \ No newline at end of file diff --git a/django-stock-tracker/home/templates/includes/theme-settings.html b/django-stock-tracker/home/templates/includes/theme-settings.html new file mode 100644 index 0000000..4a93202 --- /dev/null +++ b/django-stock-tracker/home/templates/includes/theme-settings.html @@ -0,0 +1,38 @@ +{% load static %} + +
[Markup stripped in this extract. The theme-settings panel shows "Open source 💛" with a "Star" button, a "Download" link, the note "Actively supported by AppSeed.", and a "Settings" toggle.]
\ No newline at end of file diff --git a/django-stock-tracker/home/templates/sandbox.html b/django-stock-tracker/home/templates/sandbox.html new file mode 100644 index 0000000..e55fdc8 --- /dev/null +++ b/django-stock-tracker/home/templates/sandbox.html @@ -0,0 +1,252 @@ +{% extends 'base.html' %} +{% load static %} + +{% block content %} + +
[Markup stripped in this extract. The sandbox page body renders a "Sales Value" card ($10,567, Yesterday 10.57%, Month/Week toggle) and three summary cards: "Customers" (345,678 / 345k, Feb 1 - Apr 1, USA, Since last month 22%), "Revenue" ($43,594, Feb 1 - Apr 1, GER, Since last month 2%), and "Bounce Rate" (50.88%, Feb 1 - Apr 1, Since last month 4%).]
+ +{% endblock content %} + + +document.getElementById('dropdown-menu').addEventListener('change', function() { + var selectedValue = this.value; // Get selected value + updateGraph(selectedValue); // Call function to update the graph +}); + +// Function to update the graph based on selected value +function updateGraph(selectedValue) { + // Send AJAX request to backend with selected value + var xhr = new XMLHttpRequest(); + xhr.open('POST', '/update-graph/', true); // Replace with your backend URL + xhr.setRequestHeader('Content-Type', 'application/json'); + xhr.onreadystatechange = function() { + if (xhr.readyState === 4 && xhr.status === 200) { + var responseData = JSON.parse(xhr.responseText); + // Call function to update the graph with received data + updateChart(responseData); + } + }; + xhr.send(JSON.stringify({ value: selectedValue })); +} + +// Function to update the chart with new data +function updateChart(data) { + var dates = data.map(item => item.date); + var closes = data.map(item => item.close); + + // Update the existing chart with new data + myChart.data.labels = dates; + myChart.data.datasets[0].data = closes; + myChart.update(); +} \ No newline at end of file diff --git a/django-stock-tracker/home/tests.py b/django-stock-tracker/home/tests.py new file mode 100644 index 0000000..7ce503c --- /dev/null +++ b/django-stock-tracker/home/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/django-stock-tracker/home/urls.py b/django-stock-tracker/home/urls.py new file mode 100644 index 0000000..5fdaaeb --- /dev/null +++ b/django-stock-tracker/home/urls.py @@ -0,0 +1,10 @@ +from django.urls import path + +from . import views + +urlpatterns = [ + path('', views.index, name='index'), + path('graph', views.update_graph, name="graph"), + path('price', views.update_price, name="price"), + path('news', views.update_news, name="news"), +] diff --git a/django-stock-tracker/home/views.py b/django-stock-tracker/home/views.py new file mode 100644 index 0000000..513f707 --- /dev/null +++ b/django-stock-tracker/home/views.py @@ -0,0 +1,42 @@ +from django.shortcuts import render +from django.http import HttpResponse, JsonResponse +import sys, json +sys.path.append("/usr/workspace/") +sys.path.append("/home/wleong/Personal_project/StockTracker") +from src.model import Model +from src.data_processing import DataProcessing +from src.live_price_display import LivePriceDisplay +from src.news_display import NewsDisplay +import pandas as pd + +# Create your views here. 
+models = Model() +news_disp = NewsDisplay() +price_disp = LivePriceDisplay() +expected_headers = ["date", "close"] +all_data = models.process_data(expected_headers) + +def index(request): + options = models.generate_company_list() + return render(request, 'dashboard.html', {'dropdown_items': options}) + +def update_graph(request): + company = request.GET.get('company') + raw_data = all_data[company] + data = { + "date": raw_data["date"], + "close": raw_data["close"] + } + df = pd.DataFrame(data) + chart_data = df.to_json(orient='records') + return JsonResponse(chart_data, safe=False) + +def update_price(request): + company = request.GET.get('company') + price = price_disp.display_final_price_yf(company) + return HttpResponse(price) + +def update_news(request): + company = request.GET.get('company') + news = json.dumps(news_disp.format_news_django(company)) + return HttpResponse(news) \ No newline at end of file diff --git a/django-stock-tracker/manage.py b/django-stock-tracker/manage.py new file mode 100644 index 0000000..4e20ce5 --- /dev/null +++ b/django-stock-tracker/manage.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +"""Django's command-line utility for administrative tasks.""" +import os +import sys + + +def main(): + """Run administrative tasks.""" + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings") + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == "__main__": + main() diff --git a/django-stock-tracker/nginx/appseed-app.conf b/django-stock-tracker/nginx/appseed-app.conf new file mode 100644 index 0000000..61c7a1e --- /dev/null +++ b/django-stock-tracker/nginx/appseed-app.conf @@ -0,0 +1,15 @@ +upstream webapp { + server appseed_app:5005; +} + +server { + listen 5085; + server_name localhost; + + location / { + proxy_pass http://webapp; + proxy_set_header Host $host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + +} diff --git a/django-stock-tracker/render.yaml b/django-stock-tracker/render.yaml new file mode 100644 index 0000000..e90d409 --- /dev/null +++ b/django-stock-tracker/render.yaml @@ -0,0 +1,15 @@ +services: + - type: web + name: django-volt-dash + plan: starter + env: python + region: frankfurt # region should be same as your database region. 
+ buildCommand: "./build.sh" + startCommand: "gunicorn core.wsgi:application" + envVars: + - key: DEBUG + value: False + - key: SECRET_KEY + generateValue: true + - key: WEB_CONCURRENCY + value: 4 diff --git a/django-stock-tracker/requirements.txt b/django-stock-tracker/requirements.txt new file mode 100644 index 0000000..e1908df --- /dev/null +++ b/django-stock-tracker/requirements.txt @@ -0,0 +1,13 @@ +# Core +django==4.1.12 +python-dotenv==1.0.0 + +# UI +django-admin-volt==1.0.10 + +# Deployment +whitenoise==6.5.0 +gunicorn==21.2.0 + +# psycopg2-binary +# mysqlclient diff --git a/django-stock-tracker/static/.gitkeep b/django-stock-tracker/static/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/django-stock-tracker/static/chart.js b/django-stock-tracker/static/chart.js new file mode 100755 index 0000000..78804ab --- /dev/null +++ b/django-stock-tracker/static/chart.js @@ -0,0 +1,21 @@ +var ctx = document.getElementById('myChart').getContext('2d'); +var myChart = new Chart(ctx, { + type: 'bar', + data: { + labels: ['A', 'B', 'C', 'D', 'E'], + datasets: [{ + label: 'Data', + data: chartData, // Replace chartData with your actual data + backgroundColor: 'rgba(255, 99, 132, 0.2)', + borderColor: 'rgba(255, 99, 132, 1)', + borderWidth: 1 + }] + }, + options: { + scales: { + y: { + beginAtZero: true + } + } + } +}); diff --git a/docs/explanation.md b/docs/explanation.md new file mode 100644 index 0000000..1a74ec0 --- /dev/null +++ b/docs/explanation.md @@ -0,0 +1,15 @@ +This part of the project documentation focuses on an +**understanding-oriented** approach. You'll get a +chance to read about the background of the project, +as well as reasoning about how it was implemented. + +> **Note:** Expand this section by considering the +> following points: + +- Give context and background on your library +- Explain why you created it +- Provide multiple examples and approaches of how + to work with it +- Help the reader make connections +- Avoid writing instructions or technical descriptions + here \ No newline at end of file diff --git a/docs/how-to-guides.md b/docs/how-to-guides.md new file mode 100644 index 0000000..18c2ad3 --- /dev/null +++ b/docs/how-to-guides.md @@ -0,0 +1,6 @@ +This part of the project documentation focuses on a +**problem-oriented** approach. You'll tackle common +tasks that you might have, with the help of the code +provided in this project. + +To be continued. \ No newline at end of file diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000..f7dd8ac --- /dev/null +++ b/docs/index.md @@ -0,0 +1,30 @@ +This site contains the project documentation for the +'Calculators' project that is used mainly for individual +tax calculation purposes. Its aim is to provide users +with a fairly accurate amount of tax paid, given the +financial year and calculator. + +## Table Of Contents + +The documentation follows the best practice for +project documentation as described by Daniele Procida +in the [Diátaxis documentation framework](https://diataxis.fr/) +and consists of four separate parts: + +1. [Tutorials](tutorials.md) +2. [How-To Guides](how-to-guides.md) +3. [Reference](reference.md) +4. [Explanation](explanation.md) + +Quickly find what you're looking for depending on +your use case by looking at the different pages. + +## Project Overview + +::: src + +## Acknowledgements +I want to thank my house plants for providing me with +a negligible amount of oxygen each day. 
Also, I want +to thank the sun for providing more than half of their +nourishment free of charge. diff --git a/docs/reference.md b/docs/reference.md new file mode 100644 index 0000000..162a8ec --- /dev/null +++ b/docs/reference.md @@ -0,0 +1,8 @@ +This part of the project documentation focuses on +an **information-oriented** approach. Use it as a +reference for the technical implementation of the +`Calculators` project code. + +::: src.live_price_display +::: src.news_display +::: src.model \ No newline at end of file diff --git a/docs/tutorials.md b/docs/tutorials.md new file mode 100644 index 0000000..384f1d6 --- /dev/null +++ b/docs/tutorials.md @@ -0,0 +1,16 @@ +This part of the project documentation focuses on a +**learning-oriented** approach. You'll learn how to +get started with the code in this project. + +> **Note:** Expand this section by considering the +> following points: + +- Help newcomers with getting started +- Teach readers about your library by making them + write code +- Inspire confidence through examples that work for + everyone, repeatably +- Give readers an immediate sense of achievement +- Show concrete examples, no abstractions +- Provide the minimum necessary explanation +- Avoid any distractions \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 0000000..cc785b7 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,14 @@ +site_name: Stock Tracker + +theme: + name: "material" + +plugins: + - mkdocstrings + +nav: + - Stock Tracker Docs: index.md + - tutorials.md + - How-To Guides: how-to-guides.md + - reference.md + - explanation.md diff --git a/models/forecasting-algorithms/ARIMA_forecast.py b/models/forecasting-algorithms/ARIMA_forecast.py deleted file mode 100644 index 77618ee..0000000 --- a/models/forecasting-algorithms/ARIMA_forecast.py +++ /dev/null @@ -1,188 +0,0 @@ -import json, argparse -from statsmodels.tsa.arima.model import ARIMA -from collections import deque -import pmdarima as pm -import numpy as np -import pandas as pd - -class ARIMAForecast: - - def __init__(self) -> None: - with open("data.txt", "r") as file: - raw_data = file.read() - raw_data = raw_data.replace("'", "\"") - data = json.loads(raw_data) - self.processed_data = [{"date": data_point["date"], "close": float(data_point["close"])} for data_point in data] - self.df = pd.DataFrame.from_dict(self.processed_data) - - - def find_nearest_date(self, date_offset, start_date, direction): - if direction == "backwards": - req_date = pd.to_datetime(start_date) - pd.DateOffset(days = date_offset) - elif direction == "forwards": - req_date = pd.to_datetime(start_date) + pd.DateOffset(days = date_offset) - - queue = deque([req_date]) - visited_dates = set() - last_available_date = self.processed_data[0]["date"] - while queue: - req_date = queue.popleft().strftime('%Y-%m-%d') - if req_date in visited_dates: - continue - idx = self.df[self.df.date == req_date].index.values - if idx.size > 0: - if (direction == "forwards" and req_date >= start_date) or (direction == "backwards" and req_date < start_date): - break - visited_dates.add(req_date) - queue.append(pd.to_datetime(req_date) - pd.DateOffset(days = 1)) - if req_date < last_available_date: - queue.append(pd.to_datetime(req_date) + pd.DateOffset(days = 1)) - - return start_date, req_date, idx[0] - - def slice_data(self, start_date, **kwargs): - final_idx = kwargs.get("final_idx", None) - start_idx = self.df[self.df.date == start_date].index.values[0] - if final_idx: - if final_idx > start_idx: - return 
self.df.iloc[start_idx:final_idx] - else: - return self.df.iloc[start_idx:final_idx:-1] - else: return self.df.iloc[:start_idx] - - def window_slice_optimisation(self, start_date): - best_results = {"AIC": float("inf"), "combination":{"p": 0, "d": 0, "q": 0}} - date_offset = 180 - curr_start_date = start_date - _, curr_end_date, curr_end_idx = self.find_nearest_date(3*365, curr_start_date, "forwards") - _, _, curr_goal_idx = self.find_nearest_date(date_offset, curr_end_date, "forwards") - curr_end_sliced_data = self.slice_data(curr_start_date, final_idx = curr_end_idx) - curr_goal_sliced_data = self.slice_data(curr_end_date, final_idx = curr_goal_idx) - - for p in range(0,4): - for d in range(0, 3): - for q in range(0, 4): - arima_model_manual = ARIMA(curr_end_sliced_data.close, order=(p, d, q), enforce_invertibility=False, enforce_stationarity=False) - model_manual = arima_model_manual.fit(method_kwargs={"warn_convergence": False}) - aic_value_manual = model_manual.aic - - if aic_value_manual < best_results["AIC"]: - best_results["AIC"] = float(aic_value_manual) - best_results["combination"]["p"] = p - best_results["combination"]["d"] = d - best_results["combination"]["q"] = q - p_manual, d_manual, q_manual = list(best_results["combination"].values()) - arima_model_manual = ARIMA(curr_end_sliced_data.close, order=(p_manual, d_manual, q_manual), enforce_invertibility=False, enforce_stationarity=False) - model_manual = arima_model_manual.fit(method_kwargs={"warn_convergence": False}) - try: - forecast_length = len(curr_goal_sliced_data) - forecasted_values_manual = pd.Series(model_manual.forecast(forecast_length), index=self.df.close[curr_end_idx:curr_goal_idx:-1].index) - actual_values = self.df.close[curr_end_idx:curr_goal_idx:-1] - Mean_Absolute_Percentage_Error_manual = np.mean(np.abs(forecasted_values_manual - actual_values)/np.abs(actual_values)) * 100 - - model_auto = pm.auto_arima(curr_end_sliced_data.close, seasonal=True, m=12) - (p_auto, d_auto, q_auto) = model_auto.get_params()["order"] - arima_model_auto = ARIMA(curr_end_sliced_data.close, order=(p_auto, d_auto, q_auto), enforce_invertibility=False, enforce_stationarity=False) - model_auto = arima_model_auto.fit(method_kwargs={"warn_convergence": False}) - forecast_length = len(curr_goal_sliced_data) - forecasted_values_auto = pd.Series(model_auto.forecast(forecast_length), index=self.df.close[curr_end_idx:curr_goal_idx:-1].index) - actual_values = self.df.close[curr_end_idx:curr_goal_idx:-1] - Mean_Absolute_Percentage_Error_auto = np.mean(np.abs(forecasted_values_auto - actual_values)/np.abs(actual_values)) * 100 - - return Mean_Absolute_Percentage_Error_manual, Mean_Absolute_Percentage_Error_auto - except ValueError as e: - return None, None - - def train_test_optimisation(self, backwards_duration): - best_results_trained_manual = {"AIC": float("inf"), "combination":{"p": 0, "d": 0, "q": 0}} - _, first_data_date, _ = self.find_nearest_date(backwards_duration, self.processed_data[0]["date"], "backwards") - sliced_data = self.slice_data(first_data_date) - train_value_index = len(sliced_data) * 8 // 10 - for p in range(0,4): - for d in range(0, 3): - for q in range(0, 4): - arima_model_manual = ARIMA(sliced_data.close[:train_value_index], order=(p, d, q), enforce_invertibility=False, enforce_stationarity=False) - model_manual = arima_model_manual.fit(method_kwargs={"warn_convergence": False}) - aic_value = model_manual.aic - if aic_value < best_results_trained_manual["AIC"]: - best_results_trained_manual["AIC"] = aic_value - 
best_results_trained_manual["combination"]["p"] = p - best_results_trained_manual["combination"]["d"] = d - best_results_trained_manual["combination"]["q"] = q - - p_manual, d_manual, q_manual = list(best_results_trained_manual["combination"].values()) - arima_model_manual = ARIMA(sliced_data.close[:train_value_index], order=(p_manual, d_manual, q_manual), enforce_invertibility=False, enforce_stationarity=False) - model_manual = arima_model_manual.fit(method_kwargs={"warn_convergence": False}) - forecasted_values_manual = pd.Series(model_manual.forecast(len(sliced_data) - train_value_index), - index=sliced_data.close[train_value_index:].index) - actual_values = sliced_data.close[train_value_index:] - - Mean_Absolute_Percentage_Error_manual = np.mean(np.abs(forecasted_values_manual - actual_values)/np.abs(actual_values)) * 100 - - model_auto = pm.auto_arima(sliced_data.close, seasonal=True, m=12) - (p_auto, d_auto, q_auto) = model_auto.get_params()["order"] - arima_model_auto = ARIMA(sliced_data.close[:train_value_index], order=(p_auto, d_auto, q_auto), enforce_invertibility=False, enforce_stationarity=False) - model_auto = arima_model_auto.fit(method_kwargs={"warn_convergence": False}) - forecasted_values_auto = pd.Series(model_auto.forecast(len(sliced_data) - train_value_index), - index=sliced_data.close[train_value_index:].index) - actual_values = self.df.close[train_value_index:] - Mean_Absolute_Percentage_Error_auto = np.mean(np.abs(forecasted_values_auto - actual_values)/np.abs(actual_values)) * 100 - - return Mean_Absolute_Percentage_Error_manual, Mean_Absolute_Percentage_Error_auto - - def generate_mape(self, start_date, slice_window, prediction_length, backwards_duration): - dates = [] - dates.append(start_date) - slice_window = eval(slice_window) - prediction_length = eval(prediction_length) - backwards_duration = eval(backwards_duration) - manual_series, auto_series, mape_manual, mape_auto = None, None, None, None - slice_final_date = self.find_nearest_date(slice_window, start_date, "forwards")[1] - slice_window_manual_mape_list, slice_window_auto_mape_list = [], [] - manual_result, auto_result = self.window_slice_optimisation(dates[-1]) - slice_window_manual_mape_list.append(manual_result) - slice_window_auto_mape_list.append(auto_result) - while slice_final_date < self.processed_data[0]["date"]: - start_date = self.find_nearest_date(prediction_length, dates[-1], "forwards")[1] - dates.append(start_date) - manual_result, auto_result = self.window_slice_optimisation(dates[-1]) - if manual_result and auto_result: - slice_window_manual_mape_list.append(manual_result) - slice_window_auto_mape_list.append(auto_result) - slice_final_date = self.find_nearest_date(slice_window, start_date, "forwards")[1] - - if slice_window_manual_mape_list[0] != None and slice_window_auto_mape_list[0] != None: - manual_series = pd.Series(slice_window_manual_mape_list) - auto_series = pd.Series(slice_window_auto_mape_list) - mape_manual = np.mean(manual_series) - mape_auto = np.mean(auto_series) - else: - print("Not enough data provided, please provide more data, or reduce the slice window or prediction length") - - try: - train_test_manual_mape, train_test_auto_mape = self.train_test_optimisation(backwards_duration) - except ValueError as e: - print("Data too short to split") - train_test_manual_mape, train_test_auto_mape = None, None - except IndexError as e: - print("Need more data points") - train_test_manual_mape, train_test_auto_mape = None, None - - - return(f"""Results:\n - sliced window manual 
mape: {mape_manual},\n - sliced window auto mape: {mape_auto},\n - train test manual mape: {train_test_manual_mape},\n - train test auto mape: {train_test_auto_mape}""") - -if __name__ == "__main__": - af = ARIMAForecast() - - parser = argparse.ArgumentParser(description='Finding Mean Absolute Percentage Error using two different ARIMA methods') - parser.add_argument('start_date', help='Provide date to start the slice, ensure date has data') - parser.add_argument('slice_window', help='The window size of the slice used for analysis, in days') - parser.add_argument('prediction_length', help='The number of data points to be predicted') - parser.add_argument('backwards_duration', help='How far back would the first data be, in days') - args = parser.parse_args() - - af.generate_mape(args.start_date, args.slice_window, args.prediction_length, args.backwards_duration) diff --git a/models/forecasting-algorithms/monte_carlo_forecast.py b/models/forecasting-algorithms/monte_carlo_forecast.py deleted file mode 100644 index e28970c..0000000 --- a/models/forecasting-algorithms/monte_carlo_forecast.py +++ /dev/null @@ -1,70 +0,0 @@ -import json, argparse -import numpy as np -import pandas as pd -import matplotlib.pyplot as plt -from scipy.stats import norm - - -class MonteCarloForecast: - - def __init__(self) -> None: - with open("data.txt", "r") as file: - raw_data = file.read() - raw_data = raw_data.replace("'", "\"") - data = json.loads(raw_data) - self.processed_data = [{"date": data_point["date"], "close": float(data_point["close"])} for data_point in data] - self.df = pd.DataFrame.from_dict(self.processed_data) - - def generate_mape(self, days_to_test, days_to_predict, number_of_simulations, return_mode): - self.df.date = pd.to_datetime(mc.df.date) - daily_return = np.log(1 + self.df.close.pct_change()) - average_daily_return = daily_return.mean() - variance = daily_return.var() - drift = average_daily_return - (variance/2) - standard_deviation = daily_return.std() - days_to_test = eval(days_to_test) - days_to_predict = eval(days_to_predict) - number_of_simulations = eval(number_of_simulations) - predictions = np.zeros(days_to_test + days_to_predict) - predictions[0] = self.df.close[days_to_test + days_to_predict] - pred_collection = np.ndarray(shape=(number_of_simulations, days_to_test + days_to_predict)) - curr_mean_absolute_error = 0 - differences = np.array([]) - for sim_idx in range(0,number_of_simulations): - for prediction_idx in range(1, days_to_test + days_to_predict): - random_value = standard_deviation * norm.ppf(np.random.rand()) - predictions[prediction_idx] = predictions[prediction_idx - 1] * np.exp(drift + random_value) - pred_collection[sim_idx] = predictions - actual_values = self.df.close[:days_to_test] - predicted_values = predictions[:days_to_test] - curr_mean_absolute_error += np.mean(np.abs(predicted_values - actual_values) / np.abs(actual_values)) - if return_mode != "MAPE only": - difference_array = np.subtract(predicted_values, actual_values) - difference_value = np.sum(np.abs(difference_array)) - differences = np.append(differences, difference_value) - - if return_mode != "MAPE only": - best_fit = np.argmin(differences) - future_prices = pred_collection[best_fit][days_to_predict * -1:] - - - Mean_Absolute_Percentage_Error = curr_mean_absolute_error / number_of_simulations * 100 - if return_mode == "forecast only": - return future_prices - elif return_mode == "both": - return Mean_Absolute_Percentage_Error, future_prices - elif return_mode == "MAPE only": - return 
Mean_Absolute_Percentage_Error - - -if __name__ == "__main__": - mc = MonteCarloForecast() - - parser = argparse.ArgumentParser(description='Finding Mean Absolute Percentage Error using Monte Carlo Simulation') - parser.add_argument('days_to_test', help='Provide the number of days to test') - parser.add_argument('days_to_predict', help='Provide the number of days to predict') - parser.add_argument('number_of_simulations', help='Provide the number of simulations to run') - parser.add_argument('return_mode', help='Output to be returned, choose one of the modes: "forecast only", "both", or "MAPE only"') - args = parser.parse_args() - - mc.generate_mape(args.days_to_test, args.days_to_predict, args.number_of_simulations, args.return_mode) diff --git a/models/forecasting-algorithms/moving_average.py b/models/forecasting-algorithms/moving_average.py deleted file mode 100644 index e5b0428..0000000 --- a/models/forecasting-algorithms/moving_average.py +++ /dev/null @@ -1,106 +0,0 @@ -from parameters import mongodb_connection -from pymongo import MongoClient -import pandas as pd -import numpy as np -import argparse - - -class MovingAverage: - - def __init__(self, company) -> None: - client = MongoClient(mongodb_connection) - database = client.StockTracker - collection = database.Companies - projection = {"_id": 1, "price": 1} - cursor = collection.find({"_id": company}, projection) - for doc in cursor: - all_points = doc["price"] - self.dataset = [float(closing_price["close"]) for closing_price in all_points] - self.window_size = [window for window in range(10, 1000)] - self.smoothing_factor = [smoothing_factor / 10 for smoothing_factor in range(1, 10)] - self.sma_results = {} - self.sma_predictions = [] - self.ema_results = {} - self.ema_predictions = [] - self.best_results = {"algo": None, "MAPE": float("inf"), "window": None, "smoothing_factor": None} - self.mape = float("inf") - - def simple_moving_average(self, window): - dataset_length = len(self.dataset) - start, end = 0, window - curr_sum = sum(self.dataset[:end]) - actual_dataset, forecasted_dataset = [], [] - actual_data = self.dataset[end] - actual_dataset.append(actual_data) - forecasted_data = curr_sum / window - forecasted_dataset.append(forecasted_data) - for end in range(window + 1, dataset_length): - curr_sum = curr_sum + self.dataset[end - 1] - self.dataset[start] - start += 1 - actual_data = self.dataset[end] - actual_dataset.append(actual_data) - forecasted_data = curr_sum / window - forecasted_dataset.append(forecasted_data) - actual_dataset = pd.Series(actual_dataset) - forecasted_dataset = pd.Series(forecasted_dataset) - curr_mape = np.mean(np.abs(forecasted_dataset - actual_dataset)/np.abs(actual_dataset)) * 100 - self.sma_results[window] = { - "MAPE": curr_mape - } - if curr_mape < self.best_results["MAPE"]: - self.best_results["algo"] = "sma" - self.best_results["MAPE"] = curr_mape - self.best_results["window"] = window - self.best_results["smoothing_factor"] = None - return (curr_sum + self.dataset[end] - self.dataset[start]) / window - - def exponential_moving_average(self, smoothing_factor): - dataset_length = len(self.dataset) - total_percentage_error = 0 - first_data = self.dataset[0] - second_data = self.dataset[1] - actual_dataset, forecasted_dataset = [], [] - actual_dataset.append(second_data) - forecasted_dataset.append(first_data) - curr_error = second_data - first_data - total_percentage_error += (abs(curr_error) / second_data) * 100 - for end in range(2, dataset_length): - forecasted_value = smoothing_factor * 
second_data + (1 - smoothing_factor) * first_data - actual_data = self.dataset[end] - actual_dataset.append(actual_data) - forecasted_dataset.append(forecasted_value) - curr_error = forecasted_value - actual_data - total_percentage_error += (abs(curr_error) / actual_data) * 100 - first_data = forecasted_value - second_data = actual_data - actual_dataset = pd.Series(actual_dataset) - forecasted_dataset = pd.Series(forecasted_dataset) - curr_mape = np.mean(np.abs(forecasted_dataset - actual_dataset)/np.abs(actual_dataset)) * 100 - self.ema_results[smoothing_factor] = { - "MAPE": curr_mape - } - if curr_mape < self.best_results["MAPE"]: - self.best_results["algo"] = "ema" - self.best_results["MAPE"] = curr_mape - self.best_results["window"] = None - self.best_results["smoothing_factor"] = smoothing_factor - return smoothing_factor * second_data + (1 - smoothing_factor) * first_data - - def run_forecast(self): - for window in self.window_size: - forecasted_value = self.simple_moving_average(window) - self.sma_predictions.append(forecasted_value) - - for smoothing_factor in self.smoothing_factor: - forecasted_value = self.exponential_moving_average(smoothing_factor) - self.ema_predictions.append(forecasted_value) - - return self.sma_results, self.sma_predictions, self.ema_results, self.ema_predictions - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Finding Mean Absolute Percentage Error using two different moving averages') - parser.add_argument('company_name', help='Provide company name to analyse') - args = parser.parse_args() - ma = MovingAverage(args.company_name) - ma.run_forecast() diff --git a/models/forecasting-algorithms/proph_forecast.py b/models/forecasting-algorithms/proph_forecast.py deleted file mode 100644 index e2b09a3..0000000 --- a/models/forecasting-algorithms/proph_forecast.py +++ /dev/null @@ -1,45 +0,0 @@ -import pandas as pd -from prophet import Prophet -from parameters import mongodb_connection -from pymongo import MongoClient -import numpy as np -import argparse - -class ProphForecast: - - def __init__(self) -> None: - client = MongoClient(mongodb_connection) - database = client.StockTracker - collection = database.Companies - projection = {"_id": 1, "price": 1} - cursor = collection.find({"_id": "AAPL"}, projection) - for doc in cursor: - self.all_points = doc["price"] - - def generate_mape(self, days_to_test, days_to_predict): - days_to_test = eval(days_to_test) - days_to_predict = eval(days_to_predict) - df = pd.DataFrame.from_dict(self.all_points[:days_to_test][::-1]) - new_headers = {"date": "ds", - "close": "y"} - df.rename(columns=new_headers, - inplace=True) - m = Prophet() - m.fit(df) - future = m.make_future_dataframe(periods=days_to_predict) - forecast = m.predict(future) - actual_prices = pd.Series([float(price) for price in df["y"].values.tolist()]) - forecasted_prices = pd.Series([price[0] for price in forecast[["yhat"]].values.tolist()[:-1]]) - Mean_Absolute_Percentage_Error = np.mean(np.abs(forecasted_prices - actual_prices)/np.abs(actual_prices)) * 100 - return Mean_Absolute_Percentage_Error - - -if __name__ == "__main__": - pf = ProphForecast() - - parser = argparse.ArgumentParser(description='Finding Mean Absolute Percentage Error using Prophet Forecast') - parser.add_argument('days_to_test', help='Provide the number of days to test') - parser.add_argument('days_to_predict', help='Provide the number of days to predict') - args = parser.parse_args() - - pf.generate_mape(args.days_to_test, args.days_to_predict) diff --git 
a/models/model.py b/models/model.py deleted file mode 100644 index 71fdbf0..0000000 --- a/models/model.py +++ /dev/null @@ -1,51 +0,0 @@ -from os import walk -import csv - - -class Model: - """Processes data and returns data in required format""" - def __init__(self, parent=None): - super().__init__() - self.company_list = None - self.parent = parent - self.path: str = "../individual_stocks_5yr/" - self.generate_company_list() - - def generate_company_list(self) -> list: - """ - Returns a list of companies. - - :return: (list) A list of companies. - """ - self.company_list: list = [] - expected_headers = ["date", "close"] - for (dirpath, dirnames, filenames) in walk(self.path): - for file in filenames: - if file.endswith(".csv"): - if self.check_headers_and_data(file, expected_headers): - company_name: str = file[:-9] - self.company_list.append(company_name) - self.company_list.sort() - return self.company_list - - def check_headers_and_data(self, filename, expected_headers) -> bool: - """ - Checks if each csv file has the expected headers and at least one data point for each header - :param filename: (str) The name of the file being checked - :param expected_headers: (list) The list of headers required - :return: (bool) The results of the file - """ - has_expected_headers = False - has_data = False - - with open(self.path + filename, 'r', newline='') as csvfile: - reader = csv.DictReader(csvfile) - for row in reader: - for header in expected_headers: - if row.get(header): - has_expected_headers = True - has_data = True - break - if has_expected_headers and has_data: - break - return has_expected_headers and has_data diff --git a/processing/data_processing.py b/processing/data_processing.py deleted file mode 100644 index 82064f5..0000000 --- a/processing/data_processing.py +++ /dev/null @@ -1,31 +0,0 @@ -import pandas as pd -from models import Model - - -class DataProcessing: - """Accesses csv files directory and converts the csv files to dictionaries""" - - def __init__(self, parent=None): - super().__init__() - self.data = None - self.parent = parent - self.path: str = "../individual_stocks_5yr/" - self.companies_list = Model().company_list - self.companies_data = self.process_data() - - def process_data(self) -> pd.DataFrame: - """ - Slices the data as required. - - :return: (DataFrame) A DataFrame containing required information of all companies. 
- """ - companies_data: dict = {} - for company in self.companies_list: - csv_file: str = f"{self.path}{company}_data.csv" - df: pd.DataFrame = pd.read_csv( - csv_file, header=0, usecols=["date", "close"] - ) - modified_data: dict = df.to_dict("list") - companies_data[company] = modified_data - self.data = pd.DataFrame(companies_data) - return self.data diff --git a/requirements.txt b/requirements.txt index a2b8b8a..d9b273b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,36 +1,47 @@ -bokeh==3.1.1 +appdirs==1.4.4 +asgiref==3.7.2 +beautifulsoup4==4.12.3 certifi==2023.5.7 charset-normalizer==3.1.0 -contourpy==1.0.7 -cycler==0.11.0 +coverage==7.4.3 +Django==4.1.12 +django-admin-volt==1.0.10 exceptiongroup==1.1.1 -fonttools==4.42.1 +frozendict==2.4.0 +gunicorn==21.2.0 +html5lib==1.1 idna==3.4 iniconfig==2.0.0 -Jinja2==3.1.2 -kiwisolver==1.4.5 -MarkupSafe==2.1.2 -matplotlib==3.7.2 -numpy==1.24.3 +install==1.3.5 +jmespath==1.0.1 +lxml==4.9.3 +multitasking==0.0.11 +mypy==1.8.0 +mypy-extensions==1.0.0 +numpy==1.26.4 packaging==23.1 -pandas==2.0.1 -Pillow==9.5.0 +pandas==2.2.1 +pandas-stubs==2.2.0.240218 +peewee==3.17.1 pluggy==1.0.0 -pyparsing==3.0.9 -PyQt5==5.15.9 -PyQt5-Qt5==5.15.2 -PyQt5-sip==12.12.1 -pyqtgraph==0.13.3 -PyQtWebEngine==5.15.6 -PyQtWebEngine-Qt5==5.15.2 pytest==7.3.1 +pytest-cov==4.1.0 python-dateutil==2.8.2 +python-dotenv==1.0.0 pytz==2023.3 -PyYAML==6.0 requests==2.31.0 +ruff==0.3.0 six==1.16.0 +soupsieve==2.5 +sqlparse==0.4.4 tomli==2.0.1 -tornado==6.3.2 +types-openpyxl==3.1.0.20240301 +types-pytz==2024.1.0.20240203 +types-requests==2.31.0.20240218 +typing_extensions==4.8.0 tzdata==2023.3 urllib3==2.0.2 -xyzservices==2023.5.0 +utils==1.0.1 +webencodings==0.5.1 +whitenoise==6.5.0 +yfinance==0.2.37 diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/company_selection.py b/src/company_selection.py deleted file mode 100644 index e75da64..0000000 --- a/src/company_selection.py +++ /dev/null @@ -1,40 +0,0 @@ -from PyQt5.QtWidgets import QWidget, QComboBox -from graph_plotting import GraphPlotting -from live_price_display import LivePriceDisplay -from news_display import NewsDisplay -from model import Model - -class CompanySelection(QWidget): - """Displays and handles company selection menu""" - - def __init__(self, parent=None): - super().__init__() - self.parent = parent - self.combo_box = QComboBox() - self.combo_box.setMaximumSize(700, 50) - self.placeholder_text: str = "Select a company" - self.combo_box.addItem(self.placeholder_text) - - models = Model() - # Populate the combo box with available company names from csv files - for company in models.company_list: - self.combo_box.addItem(company) - - # Create instances of widgets for graph plotting, live price display, and news display - self.graph_plotting_widget: GraphPlotting = GraphPlotting() - self.live_price_display_widget: LivePriceDisplay = LivePriceDisplay() - self.news_display_widget: NewsDisplay = NewsDisplay() - self.combo_box.currentTextChanged.connect(lambda: self.company_selected()) - - # Connect the combo box's currentTextChanged signal to update the selected company in the widgets - def company_selected(self) -> None: - """ - If the selected company is not equal to the placeholder text, it will obtain information of the selected company - , if it is then it will not do anything. 
- - :return: None - """ - if self.combo_box.currentText() != self.placeholder_text: - self.graph_plotting_widget.plot_selected_graph(self.combo_box.currentText()) - self.live_price_display_widget.display_final_price(self.combo_box.currentText()) - self.news_display_widget.display_company_news(self.combo_box.currentText()) diff --git a/src/graph_plotting.py b/src/graph_plotting.py deleted file mode 100644 index a693d60..0000000 --- a/src/graph_plotting.py +++ /dev/null @@ -1,85 +0,0 @@ -from PyQt5.QtWidgets import QWidget, QVBoxLayout -from PyQt5.QtWebEngineWidgets import QWebEngineView -from bokeh.plotting import figure -from bokeh.resources import CDN -from bokeh.embed import file_html -from bokeh.models import ColumnDataSource -from data_processing import DataProcessing - -import numpy as np -import pandas as pd - -PLOT_HEIGHT = 700 -PLOT_WIDTH = 1100 - - -class GraphPlotting(QWidget): - """Plots graph from available data""" - web_view: QWebEngineView - - def __init__(self, parent=None): - super().__init__() - self.web_view: str = None - self.parent = parent - self.vbox_left = QVBoxLayout() - self.placeholder_graph() - self.data_processor = DataProcessing() - - def placeholder_graph(self) -> None: - """ - Creates a placeholder graph using Bokeh. - - :return: None - """ - p = figure() - p.line([1, 2, 3, 4, 5], [5, 4, 3, 2, 1]) - - # Convert the Bokeh plot to HTML - html: str = file_html(p, CDN, "my plot") - - # Create a web view widget to display the Bokeh plot - self.web_view = QWebEngineView() - self.web_view.setHtml(html) - - self.vbox_left.addWidget(self.web_view) - - @staticmethod - def plotting_data(filename: dict) -> str: - """ - Obtains the data of the selected company. - - :param filename: (dict) Data of the selected company. - :return: (str) Html code for graph plotting. - """ - dates: np.ndarray = np.array(filename["date"], dtype=np.datetime64) - source: ColumnDataSource = ColumnDataSource(data=dict(date=dates, close=filename["close"])) - p = figure(height=PLOT_HEIGHT, width=PLOT_WIDTH, tools="xpan, hover", toolbar_location=None, - x_axis_type="datetime", x_axis_location="below", x_range=(dates[250], dates[-1]), - background_fill_color="#efefef") - - p.line("date", "close", source=source) - p.yaxis.axis_label = "Price" - - html = file_html(p, CDN, "my_plot") - - return html - - def plot_selected_graph(self, company_name: str) -> None: - """ - Plots selected company. 
- - :param company_name: (str) The name of the selected company - :return: None - """ - data: dict = self.data_processor.companies_data[company_name].to_dict() - html = self.plotting_data(data) - - if hasattr(self, 'web_view'): - self.vbox_left.removeWidget(self.web_view) - self.web_view.deleteLater() - - # Create a web view widget to display the Bokeh plot - self.web_view = QWebEngineView() - self.web_view.setHtml(html) - - self.vbox_left.addWidget(self.web_view) diff --git a/src/live_price_display.py b/src/live_price_display.py index 62ff417..cd8026c 100644 --- a/src/live_price_display.py +++ b/src/live_price_display.py @@ -1,72 +1,95 @@ -from PyQt5.QtWidgets import QWidget, QLabel -from PyQt5.QtGui import QFont -from PyQt5 import QtCore +"""This module returns the most recent price of the selected company.""" + +from typing import Union, Any import requests +import yfinance as yf # type: ignore[import-not-found] # type: ignore[import-untyped] # pylint: disable=E0401 +import pandas as pd -from processing.data_processing import DataProcessing -from parameters import ALPHA_VANTAGE_API_KEY, mongodb_connection +from src.parameters import ALPHA_VANTAGE_API_KEY # type: ignore[attr-defined] ALPHA_VANTAGE_ENDPOINT = "https://www.alphavantage.co/query" -class LivePriceDisplay(QWidget): - """Shows the live prices""" - - def __init__(self, parent=None): - super().__init__() - self.price = None - self.parent = parent - self.share_price_label = QLabel("Live price") - self.share_price_label.setMaximumSize(700, 500) - self.share_price_label.setFont(QFont("Times", 24)) - self.share_price_label.setAlignment(QtCore.Qt.AlignCenter) +class LivePriceDisplay: + """ + Returns the most recent price of the selected company. + """ - def display_final_price(self, company_name: str) -> None: + @staticmethod + def display_final_price_av(company_name: str) -> Union[str, dict, Any]: """ - Attempts to display the final price of the selected company. + Returns the price using Alpha Vantage. - :param company_name: (str) The name of the name to display the final price for. - :return: - """ + Args: + company_name: The ticker symbol of the company + Returns: + The most recent price as a string + """ try: # Gets last available price by default price_params: dict = { "apikey": ALPHA_VANTAGE_API_KEY, "function": "TIME_SERIES_DAILY", - "symbol": company_name + "symbol": company_name, } - price_response: requests.models.Response = requests.get(ALPHA_VANTAGE_ENDPOINT, params=price_params) + price_response: requests.models.Response = requests.get( + ALPHA_VANTAGE_ENDPOINT, params=price_params, timeout=20 + ) if price_response.ok: response_data: dict = price_response.json() if "Time Series (Daily)" in response_data: price_list: dict = response_data["Time Series (Daily)"] most_recent_day: str = next(iter(price_list)) - self.price: float = price_list[most_recent_day]["4. close"] + return price_list[most_recent_day]["4. close"] + return response_data + return price_response + + except ( + requests.exceptions.MissingSchema, + requests.RequestException, + KeyError, + IndexError, + ): + return "Error fetching price" - except (requests.RequestException, KeyError, IndexError): - self.price: str = "Error fetching price" + @staticmethod + def display_final_price_yf(company_name: str) -> Union[float, str]: + """ + Returns the price using Yahoo Finance. 
+ + Args: + company_name: The ticker symbol of the company + + Returns: + The most recent price in string + """ + try: + df: pd.DataFrame = yf.download(company_name) # pylint: disable=C0103 + price: float = df.iloc[-1]["Close"] + return round(price, 5) + except IndexError: + return "Error fetching price" - self.share_price_label.setText(f"{company_name}:\n{self.price}") -from pymongo import MongoClient -client = MongoClient(mongodb_connection) -database = client.StockTracker -collection = database.Companies -projection = {"_id": 0, "name": 1, "price": 1} +# from pymongo import MongoClient +# client = MongoClient(mongodb_connection) +# database = client.StockTracker +# collection = database.Companies +# projection = {"_id": 0, "name": 1, "price": 1} # cursor = collection.find({"name": "MSFT"}, projection) # for doc in cursor: # latest_date = doc["price"][0]["date"] # print(latest_date) -symbols = ["AAPL", "MSFT", "AMZN", "GOOGL", "NVDA"]# "TSLA", "GOOG", "BRK.B", "META", "UNH" -for symbol in symbols: - price_params: dict = { - "apikey": ALPHA_VANTAGE_API_KEY, - "function": "TIME_SERIES_DAILY", - "symbol": symbol, - "outputsize": "full" - } - a = requests.get(ALPHA_VANTAGE_ENDPOINT, params=price_params).json() - company = {"_id": symbol, "price":[{"date": b, "close": a["Time Series (Daily)"][b]["4. close"]} for b in a["Time Series (Daily)"]]} - result = collection.insert_one(company) - print(f"Inserted document ID: {result.inserted_id}") +# symbols = ["AAPL", "MSFT", "AMZN", "GOOGL", "NVDA"]# "TSLA", "GOOG", "BRK.B", "META", "UNH" +# for symbol in symbols: +# price_params: dict = { +# "apikey": ALPHA_VANTAGE_API_KEY, +# "function": "TIME_SERIES_DAILY", +# "symbol": symbol, +# "outputsize": "full" +# } +# a = requests.get(ALPHA_VANTAGE_ENDPOINT, params=price_params).json() +# company = {"_id": symbol, "price":[{"date": b, "close": a["Time Series (Daily)"][b]["4. 
close"]} for b in a["Time Series (Daily)"]]} # pylint: disable=C0301 +# result = collection.insert_one(company) +# print(f"Inserted document ID: {result.inserted_id}") diff --git a/src/main.py b/src/main.py deleted file mode 100644 index 80fc450..0000000 --- a/src/main.py +++ /dev/null @@ -1,40 +0,0 @@ -from PyQt5.QtWidgets import QMainWindow, QWidget, QVBoxLayout, QApplication, QHBoxLayout -import sys - -from company_selection import CompanySelection - - -class MainWindow(QMainWindow): - """Main GUI window""" - - def __init__(self, parent=None): - super().__init__() - self.parent = parent - self.setWindowTitle("Stock Viewer") - self.setGeometry(100, 100, 800, 600) - self.showMaximized() - - central_widget = QWidget() - self.setCentralWidget(central_widget) - - vbox_right = QVBoxLayout() - - cs: CompanySelection = CompanySelection() - - vbox_right.addWidget(cs.combo_box) - vbox_right.addWidget(cs.live_price_display_widget.share_price_label) - vbox_right.addWidget(cs.news_display_widget.company_news_section) - - window_layout = QHBoxLayout() - - window_layout.addLayout(cs.graph_plotting_widget.vbox_left) - window_layout.addLayout(vbox_right) - - central_widget.setLayout(window_layout) - - -if __name__ == '__main__': - app = QApplication(sys.argv) - window = MainWindow() - window.show() - app.exec_() diff --git a/src/model.py b/src/model.py new file mode 100644 index 0000000..0b696a7 --- /dev/null +++ b/src/model.py @@ -0,0 +1,106 @@ +"""This module reads csv data files and processes them into the required format""" + +from typing import Union +import os +import pandas as pd + + +class Model: + """Processes data and returns data in required format""" + + def __init__(self) -> None: + self.path: str = ( + "../individual_stocks_5yr/" + ) + + def generate_company_list(self) -> list: + """ + Returns a list of companies. + + :return: (list) A list of companies. + """ + company_list: list = [] + expected_headers: list = ["date", "close"] + for _, _, filenames in os.walk(self.path): + for file in filenames: + if file.endswith(".csv"): + if self.check_headers_and_data(file, expected_headers): + company_name: str = file[:-9] + company_list.append(company_name) + company_list.sort() + return company_list + + def check_headers_and_data(self, file, expected_headers) -> bool: + """ + Checks if each csv file has the expected headers and at least one data point for each header + :param filename: (str) The name of the file being checked + :param expected_headers: (list) The list of headers required + :return: (bool) The results of the file + """ + has_expected_headers: bool = False + has_data: bool = False + try: + parse_dates: list = ["date"] + df: pd.DataFrame = pd.read_csv( # pylint: disable=C0103 + self.path + file, + skip_blank_lines=True, + dtype={"date": "string", "close": "float64"}, + parse_dates=parse_dates, + ) + headers: set = set(df.columns.to_list()) + expected_headers_copy: list = expected_headers[:] + # Two conditions the while loop should break: + # 1. No more headers in expected_headers_copy (all are met) + # 2. 
At least one header is not met + while expected_headers_copy: + if expected_headers_copy[0] in headers: + expected_headers_copy.pop(0) + else: + break + if not expected_headers_copy: + has_expected_headers = True + else: + return False + except pd.errors.EmptyDataError: + return False + try: + df.iloc[[0]] # pylint: disable=E1101,W0104 + has_data = True + except (ValueError, IndexError, NameError): + return False + return has_expected_headers and has_data + + def process_data(self, expected_headers: list) -> Union[pd.DataFrame, str]: + """ + Slices the data as required. + + :return: (DataFrame) A DataFrame containing required information of all companies. + """ + companies_list: list = self.generate_company_list() + companies_data: dict = {} + try: + for company in companies_list: + csv_file: str = f"{self.path}{company}_data.csv" + parse_dates: list = ["date"] + df: pd.DataFrame = pd.read_csv( # pylint: disable=C0103 + csv_file, + header=0, + usecols=expected_headers, + skip_blank_lines=True, + dtype={"date": "string", "close": "float64"}, + parse_dates=parse_dates, + ) + df.dropna(how="all", subset="date", inplace=True) + df.interpolate(method="linear", inplace=True) # pylint: disable=E1101 + df["date"] = pd.to_datetime(df["date"]) + df["date"] = df["date"].dt.strftime("%Y-%m-%d") + df["close"] = pd.to_numeric(df["close"]) + modified_data: dict = df.to_dict("list") + companies_data[company] = modified_data + all_companies_data: pd.DataFrame = pd.DataFrame(companies_data) + return all_companies_data + except (ValueError, TypeError, KeyError): + return ( + "Please ensure each header is unique, data is correct, " + "or expected_headers and process_data are configured correctly" + ) diff --git a/src/news_display.py b/src/news_display.py index 0bb39d6..553875b 100644 --- a/src/news_display.py +++ b/src/news_display.py @@ -1,66 +1,59 @@ -from PyQt5.QtWidgets import QWidget, QVBoxLayout, QGroupBox, QLabel -from PyQt5.QtGui import QFont +"""This module displays the most recent news of the selected company if available""" + import requests -from parameters import NEWS_API_KEY +from src.parameters import NEWS_API_KEY # type: ignore[attr-defined] NEWS_ENDPOINT = "https://newsapi.org/v2/everything" -class NewsDisplay(QWidget): - """Obtains and handles recent news""" - - def __init__(self, parent=None): - super().__init__() - self.parent = parent - self.vbox_news = QVBoxLayout() - self.company_news_section = QGroupBox("Live news") - self.company_news_section.setMaximumSize(700, 500) - self.company_news_section.setFont(QFont("Times", 9)) - - def display_company_news(self, company_name: str) -> None: - """ - Display recent news related to a specific company. - - :param company_name: (str) The name of the company for which news is displayed. - :return: None - """ - company_news: list = self.collect_news(company_name=company_name) - - # Remove news from previously selected company, if any - while self.vbox_news.count(): - item = self.vbox_news.takeAt(0) - widget = item.widget() - if widget is not None: - widget.deleteLater() - - # Add news from currently selected company, if any - for headline in company_news: - news_label = QLabel(headline) - news_label.setWordWrap(True) - self.vbox_news.addWidget(news_label) - - self.company_news_section.setLayout(self.vbox_news) +class NewsDisplay: + """ + Returns the most recent news of the selected company, if any. 
+ """ @staticmethod - def collect_news(company_name: str) -> list: + def _collect_news(company_name: str) -> list: """ - Collect recent news articles related to a specific company and format them. + Collect recent news articles related to the selected company and format them. + Args: + company_name: The ticker symbol of the company - :param company_name: (str) The name of the company to collect news for. - :return: (list) A list of formatted news headlines with respective URLs. + Returns: + five_articles: The five most recent articles """ - news_params: dict = { - "apiKey": NEWS_API_KEY, - "qInTitle": company_name - } + news_params: dict = {"apiKey": NEWS_API_KEY, "qInTitle": company_name} - news_response: requests.models.Response = requests.get(NEWS_ENDPOINT, params=news_params) + news_response: requests.models.Response = requests.get( + NEWS_ENDPOINT, params=news_params, timeout=20 + ) articles: list = news_response.json()["articles"] five_articles: list = articles[:5] + return five_articles + + def format_news_pyqt(self, company_name: str) -> list: + """ + Formats the collected news for the PyQt5 UI. + Args: + company_name: The ticker symbol of the company - # Generate formatted headlines with clickable URLs + Returns: + A list of the five most recent articles, formatted as strings + """ + news: list = self._collect_news(company_name) return [ f"{article['title']}: ''{article['url']}''" - for article in five_articles + for article in news ] + + def format_news_django(self, company_name: str) -> list: + """ + Formats the collected news for the Django UI. + Args: + company_name: The ticker symbol of the company + + Returns: + A list of the five most recent articles as title/url dicts + """ + news: list = self._collect_news(company_name) + return [{"title": article["title"], "url": article["url"]} for article in news] diff --git a/src/parameters.py b/src/parameters.py new file mode 100644 index 0000000..c901961 --- /dev/null +++ b/src/parameters.py @@ -0,0 +1,3 @@ +"This module stores the required keys." 
+NEWS_API_KEY = "" +ALPHA_VANTAGE_API_KEY = "" diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/sample_data/CompanyA_data.csv b/tests/sample_data/CompanyA_data.csv deleted file mode 100644 index 95b3797..0000000 --- a/tests/sample_data/CompanyA_data.csv +++ /dev/null @@ -1,2 +0,0 @@ -date,open,volume,close -12/08/2021,12,2000,13 diff --git a/tests/sample_data/CompanyB_data.csv b/tests/sample_data/CompanyB_data.csv deleted file mode 100644 index 505f3ff..0000000 --- a/tests/sample_data/CompanyB_data.csv +++ /dev/null @@ -1,2 +0,0 @@ -date,,,close -12/08/2021,,,13 diff --git a/tests/sample_data/CompanyC_data.csv b/tests/sample_data/CompanyC_data.csv deleted file mode 100644 index a24f91f..0000000 --- a/tests/sample_data/CompanyC_data.csv +++ /dev/null @@ -1,2 +0,0 @@ -date,close -12/08/2021,13 diff --git a/tests/sample_data/CompanyD_data.csv b/tests/sample_data/CompanyD_data.csv deleted file mode 100644 index 19daa0b..0000000 --- a/tests/sample_data/CompanyD_data.csv +++ /dev/null @@ -1,2 +0,0 @@ -date,open,high,low ,close -12/08/2021,12,14,11,13 diff --git a/tests/sample_data/CompanyF_data.xlsx b/tests/sample_data/CompanyF_data.xlsx deleted file mode 100644 index ab28378..0000000 Binary files a/tests/sample_data/CompanyF_data.xlsx and /dev/null differ diff --git a/tests/test_data_processing.py b/tests/test_data_processing.py deleted file mode 100644 index 89576d0..0000000 --- a/tests/test_data_processing.py +++ /dev/null @@ -1,39 +0,0 @@ -import unittest -import sys - -pkg_dir = "../src" -sys.path.append(pkg_dir) - -from src import data_processing, model - - -class MyDataProcessingTestCase(unittest.TestCase): - - def test_process_data(self): - """ - Test the data processing functionality. - - This test case checks if the data processing works correctly by comparing - the number of data points generated for each company. 
- - :return: None - """ - - data_processor = data_processing.DataProcessing() - model_test = model.Model() - model_test.path = "sample_data/" - company_list = model_test.generate_company_list() - - data_processor.companies_list = company_list - data_processor.path = "sample_data/" - test_data = data_processor.process_data() - - companies_passed = [] - for company in company_list: - if len(test_data[company]) == 2: - companies_passed.append(company) - self.assertEqual(company_list, companies_passed) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_live_price_display.py b/tests/test_live_price_display.py new file mode 100644 index 0000000..9ed3c46 --- /dev/null +++ b/tests/test_live_price_display.py @@ -0,0 +1,51 @@ +from unittest.mock import patch, Mock +from src.live_price_display import LivePriceDisplay +import pytest + +@pytest.fixture +def test_live_price_display(): + yield LivePriceDisplay() + +@patch("src.live_price_display.LivePriceDisplay.display_final_price_av") +def test_mock_display_final_price_av(mock_get, test_live_price_display): + mock_get.return_value = 123.45 + results = test_live_price_display.display_final_price_av("AAPL") + assert isinstance(results, float) + assert results == 123.45 + +@patch("requests.get") +def test_mock_display_final_price_av_price_response_not_ok(mock_get, test_live_price_display): + mock_get.return_value = "" + mock_response = Mock() + mock_response.ok = False + mock_response.response = mock_get.return_value + results = mock_response.response + assert mock_response.ok == False + assert isinstance(results, str) + assert results == "" + +@patch("src.live_price_display.LivePriceDisplay.display_final_price_av") +def test_display_final_price_av_exception(mock_get, test_live_price_display): + mock_get.return_value = "Error fetching price" + result = test_live_price_display.display_final_price_av("Unknown_Company") + assert result == "Error fetching price" + +@patch("src.live_price_display.LivePriceDisplay.display_final_price_av") +def test_display_final_price_av_invalid_api_call(mock_get, test_live_price_display): + mock_get.return_value = {"Error Message": + "Invalid API call. Please retry or visit the documentation (https://www.alphavantage.co/documentation/) for TIME_SERIES_DAILY."} + result = test_live_price_display.display_final_price_av("Unknown_Company") + assert "Error Message" in result + +@patch("src.live_price_display.LivePriceDisplay.display_final_price_yf") +def test_mock_display_final_price_yf(mock_get, test_live_price_display): + mock_get.return_value = 291.97 + results = test_live_price_display.display_final_price_yf("AAPL") + assert isinstance(results, float) + assert results == 291.97 + +@patch("src.live_price_display.LivePriceDisplay.display_final_price_yf") +def test_display_final_price_yf_exception(mock_get, test_live_price_display): + mock_get.return_value = "Error fetching price" + result = test_live_price_display.display_final_price_yf("Unknown_Company") + assert result == "Error fetching price" diff --git a/tests/test_model.py b/tests/test_model.py index da4a463..8283e73 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -1,23 +1,187 @@ -import unittest +import pytest from src import model +from unittest.mock import patch, Mock +import pandas as pd -class MyModelTestCase(unittest.TestCase): +@pytest.fixture +def test_model(): + yield model.Model() - def test_generate_company_list(self): - """ - Test the model functionality. 
+@patch("src.model.Model.check_headers_and_data") +@patch("os.walk") +def test_generate_company_list(mock_walk, mock_check_headers_and_data, test_model): + mock_walk.return_value = [(" ", " ", ['CompanyB_data.csv', 'CompanyE_data.xml', 'CompanyA_data.csv', + 'CompanyF_data.xlsx', 'CompanyD_data.csv', 'CompanyC_data.csv', + 'CompanyD_data.txt'])] + mock_check_headers_and_data.return_value = True + company_list = test_model.generate_company_list() + assert company_list == ["CompanyA", "CompanyB", "CompanyC", "CompanyD"] - This test case checks if the model works correctly by comparing - the final list of companies with the expected list of companies. +@pytest.fixture +def test_headers(): + yield ["date", "close"] - :return: None - """ - model_test = model.Model() - model_test.path = "sample_data/" - company_list = model_test.generate_company_list() +@pytest.fixture +def dummy_data_all_true(): + data = [["date", "close"], + ["2023-12-12", 219.87]] + df = pd.DataFrame(data[1:], columns=data[0]) + yield df - self.assertEqual(company_list, ["CompanyA", "CompanyB", "CompanyC", "CompanyD"]) +@patch("pandas.read_csv") +def test_check_headers_and_data_all_true(mock_read_csv, test_model, dummy_data_all_true): + mock_read_csv.return_value = dummy_data_all_true + expected_headers = ["date", "close"] + results = test_model.check_headers_and_data("", expected_headers=expected_headers) + assert results == True +@pytest.fixture +def dummy_data_expected_headers_not_met(): + data = [["date", "open"], + ["2023-12-12", 219.87]] + df = pd.DataFrame(data[1:], columns=data[0]) + yield df -if __name__ == '__main__': - unittest.main() \ No newline at end of file +@patch("pandas.read_csv") +def test_check_headers_and_data_false_headers_true_data(mock_read_csv, test_model, dummy_data_expected_headers_not_met): + mock_read_csv.return_value = dummy_data_expected_headers_not_met + expected_headers = ["date", "close"] + results = test_model.check_headers_and_data("", expected_headers=expected_headers) + assert results == False + +@pytest.fixture +def dummy_data_only_headers(): + data = [["date", "close"]] + df = pd.DataFrame(columns=data[0]) + yield df + +@patch("pandas.read_csv") +def test_check_headers_and_data_only_headers(mock_read_csv, test_model, dummy_data_only_headers): + mock_read_csv.return_value = dummy_data_only_headers + expected_headers = ["date", "close"] + results = test_model.check_headers_and_data("", expected_headers=expected_headers) + assert results == False + +@pytest.fixture +def dummy_data_only_data(): + data = [["2023-12-12", 219.87]] + df = pd.DataFrame(data[0]) + yield df + +@patch("pandas.read_csv") +def test_check_headers_and_data_only_data(mock_read_csv, test_model, dummy_data_only_data): + mock_read_csv.return_value = dummy_data_only_data + expected_headers = ["date", "close"] + results = test_model.check_headers_and_data("", expected_headers=expected_headers) + assert results == False + +@pytest.fixture +def dummy_data_empty_csv(): + df = pd.DataFrame() + yield df + +@patch("pandas.read_csv") +def test_check_headers_and_data_empty_csv(mock_read_csv, test_model, dummy_data_empty_csv): + mock_read_csv.return_value = dummy_data_empty_csv + expected_headers = ["date", "close"] + results = test_model.check_headers_and_data("", expected_headers=expected_headers) + assert results == False + +@pytest.fixture +def dummy_data_additional_headers(): + data = [["date", "open", "close", "high", "low"], + ["2023-12-12", 219.87, 123, 245, 112]] + df = pd.DataFrame(data[1:], columns=data[0]) + yield df + 
+@patch("pandas.read_csv") +def test_check_headers_and_data_additional_headers(mock_read_csv, test_model, dummy_data_additional_headers): + mock_read_csv.return_value = dummy_data_additional_headers + expected_headers = ["date", "close"] + results = test_model.check_headers_and_data("", expected_headers=expected_headers) + assert results == True + +@pytest.fixture +def dummy_data_different_order_headers(): + data = [["close", "open", "date", "high", "low"], + [123, 219.87, "2023-12-12", 245, 112]] + df = pd.DataFrame(data[1:], columns=data[0]) + yield df + +@patch("pandas.read_csv") +def test_check_headers_and_data_different_order_headers(mock_read_csv, test_model, dummy_data_different_order_headers): + mock_read_csv.return_value = dummy_data_different_order_headers + expected_headers = ["date", "close"] + results = test_model.check_headers_and_data("", expected_headers=expected_headers) + assert results == True + +@pytest.fixture +def dummy_data_wrong_position_headers(): + data = [["2023-12-12", 219.87], + ["date", "close"]] + df = pd.DataFrame(data[1:], columns=data[0]) + yield df + +@patch("pandas.read_csv") +def test_check_headers_and_data_wrong_position_headers(mock_read_csv, test_model, dummy_data_wrong_position_headers): + mock_read_csv.return_value = dummy_data_wrong_position_headers + expected_headers = ["date", "close"] + results = test_model.check_headers_and_data("", expected_headers=expected_headers) + assert results == False + +@patch("src.model.Model.generate_company_list") +@patch("pandas.read_csv") +def test_process_data_all_true(mock_read_csv, mock_company_list, test_model, dummy_data_all_true): + mock_company_list.return_value = ["Company_A"] + mock_read_csv.return_value = dummy_data_all_true + results = test_model.process_data(["date", "close"]) + assert isinstance(results, pd.DataFrame) + assert (results["Company_A"]["date"] == ["2023-12-12"] and results["Company_A"]["close"] == [219.87]) + +@pytest.fixture +def dummy_data_repeated_headers_date(): + data = [["date", "date", "close"], + ["2023-12-12", "2023-12-11", 219.87]] + df = pd.DataFrame(data[1:], columns=data[0]) + yield df + +@patch("src.model.Model.generate_company_list") +@patch("pandas.read_csv") +def test_process_data_repeated_headers_date_value_error(mock_read_csv, mock_company_list, test_model, dummy_data_repeated_headers_date): + mock_company_list.return_value = ["Company_A"] + mock_read_csv.return_value = dummy_data_repeated_headers_date + results = test_model.process_data(["date", "close"]) + assert isinstance(results, str) + assert results == "Please ensure each header is unique, data is correct, or expected_headers and process_data are configured correctly" + +@pytest.fixture +def dummy_data_repeated_headers_close(): + data = [["date", "close", "close"], + ["2023-12-12", 219.87, 912.87]] + df = pd.DataFrame(data[1:], columns=data[0]) + yield df + +@patch("src.model.Model.generate_company_list") +@patch("pandas.read_csv") +def test_process_data_repeated_headers_close_type_error(mock_read_csv, mock_company_list, test_model, dummy_data_repeated_headers_close): + mock_company_list.return_value = ["Company_A"] + mock_read_csv.return_value = dummy_data_repeated_headers_close + results = test_model.process_data(["date", "close"]) + assert isinstance(results, str) + assert results == "Please ensure each header is unique, data is correct, or expected_headers and process_data are configured correctly" + +@pytest.fixture +def dummy_data_key_error(): + data = [["date", "date"], + ["2023-12-12", "2023-12-12"]] + 
df = pd.DataFrame(data[1:], columns=data[0]) + yield df + +@patch("src.model.Model.generate_company_list") +@patch("pandas.read_csv") +def test_process_data_key_error(mock_read_csv, mock_company_list, test_model, dummy_data_key_error): + mock_company_list.return_value = ["Company_A"] + mock_read_csv.return_value = dummy_data_key_error + results = test_model.process_data(["close", "close"]) + assert isinstance(results, str) + assert results == "Please ensure each header is unique, data is correct, or expected_headers and process_data are configured correctly" diff --git a/tests/test_news_display.py b/tests/test_news_display.py new file mode 100644 index 0000000..52bd4a7 --- /dev/null +++ b/tests/test_news_display.py @@ -0,0 +1,43 @@ +import pytest +from unittest.mock import patch +from src.news_display import NewsDisplay + +@pytest.fixture +def test_news_display(): + yield NewsDisplay() + +@patch("src.news_display.NewsDisplay._collect_news") +def test_collect_news(mock_get, test_news_display): + mock_get.return_value = [ + {'title': 'Some headlines', + 'author': None, + 'source': { + 'id': None, + 'name': 'website name' + }, + 'publishedAt': 'timestamp', + 'url': 'url link' + } + ] + results = test_news_display._collect_news() + assert isinstance(results, list) + assert ("title" in results[0] and results[0]["title"] is not None + and "url" in results[0] and results[0]["url"] is not None) + +@patch("src.news_display.NewsDisplay.format_news_pyqt") +def test_format_news_pyqt(mock_get, test_news_display): + mock_get.return_value = [ + "'title': '''url'''" + ] + results = test_news_display.format_news_pyqt() + assert isinstance(results, list) + assert "title" in results[0] and "url" in results[0] + +@patch("src.news_display.NewsDisplay.format_news_django") +def test_format_news_django(mock_get, test_news_display): + mock_get.return_value = [{"title":"title", "url":"url"}] + results = test_news_display.format_news_django() + assert isinstance(results, list) + assert isinstance(results[0], dict) + assert ("title" in results[0] and results[0]["title"] is not None + and "url" in results[0] and results[0]["url"] is not None) \ No newline at end of file
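A minimal usage sketch of the refactored LivePriceDisplay and NewsDisplay helpers added above. It is illustrative only: it assumes valid API keys in src/parameters.py, network access, and a ticker symbol ("AAPL") that is not part of the patch.

from src.live_price_display import LivePriceDisplay
from src.news_display import NewsDisplay

if __name__ == "__main__":
    ticker = "AAPL"  # illustrative ticker, an assumption for this sketch

    # Yahoo Finance path: a rounded float, or "Error fetching price" on failure.
    print(LivePriceDisplay.display_final_price_yf(ticker))

    # Alpha Vantage path: the latest "4. close" value as a string, the raw JSON
    # payload when the expected key is absent, or the Response object itself.
    print(LivePriceDisplay.display_final_price_av(ticker))

    # News formatted for the Django templates: dicts with "title" and "url".
    for article in NewsDisplay().format_news_django(ticker):
        print(article["title"], article["url"])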