diff --git a/.DS_Store b/.DS_Store
new file mode 100644
index 0000000..43344f5
Binary files /dev/null and b/.DS_Store differ
diff --git a/.dockerignore b/.dockerignore
index 88012b2..ca84938 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,5 +1,10 @@
env
.env
CREDENTIALS.json
-ploutos_f.tar
-ploutos.tar
\ No newline at end of file
+*.tar
+nginx
+env.example
+*.log
+README.md
+*.png
+LICENSE
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 1046701..79697c3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,64 +1,5 @@
-*.log
-*.csv
-*.csv#
-*.pyc
-*.json
-*.Rhistory
-_dashboard/__pycache__/*
-*.ipynb
-_migrations/__pycache__/*
-/Ploutos/Ploutos/pycashe/*
-/Ploutos/dashboard/migrations/*
-!/Ploutos/dashboard/migrations/__init__.py
-/Ploutos/Ploutos/dashboard/migrations/pycashe/*
-/Ploutos/Ploutos/dashboard/pycashe/*
-*.sqlite3
-
-
-
-#ignore migrations except the folder and __init__.py
-**/migrations/**
-!**/migrations
-!**/migrations/__init__.py
-
-# Git Hub Boiler plate .gitignore
-
-# Byte-compiled / optimized / DLL files
-__pycache__/
-*.py[cod]
-
-# C extensions
-*.so
-
-# Distribution / packaging
-bin/
-build/
-develop-eggs/
-dist/
-eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-*.egg-info/
-.installed.cfg
-*.egg
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-.tox/
-.coverage
-.cache
-nosetests.xml
-coverage.xml
-
-# Django stuff:
-*.log
-*.pot
-
+.ruff_cache
+.env
env
-.env
\ No newline at end of file
+__pycache__
+*.log
\ No newline at end of file
diff --git a/Ploutos/dashboard/templates/dashboard/base.html b/Ploutos/dashboard/templates/dashboard/base.html
deleted file mode 100644
index d8dd7e3..0000000
--- a/Ploutos/dashboard/templates/dashboard/base.html
+++ /dev/null
@@ -1,113 +0,0 @@
-{% load static %}
-{% load compress %}
-
-
-
- Ploutos
-
-
-
-
-
-
-
- {% compress js %}
-
-
- {% endcompress %}
-
-
- {% compress css %}
-
-
- {% endcompress %}
-
-
- {% compress js %}
-
- {% endcompress %}
-
- {% compress css %}
-
-
-
-
-
-
-
- {% endcompress %}
-
- {% compress js %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% endcompress %}
-
- {% compress js %}
-
-
-
-
-
-
- {% endcompress %}
-
-
-
-
-
- {% block content %}{% endblock %}
-
- {% block script %}{% endblock %}
-
-
\ No newline at end of file
diff --git a/Ploutos/dashboard/urls.py b/Ploutos/dashboard/urls.py
deleted file mode 100644
index b79b388..0000000
--- a/Ploutos/dashboard/urls.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from django.urls import path
-from . import views
-
-
-urlpatterns = [
- path("ploutos/", views.index, name="index"),
- path("ploutos/storage/", views.storage_chart, name="storage"),
- path("ploutos/compute/", views.compute_graph, name="compute"),
- path("ploutos/leaderboard/", views.leaderboard, name="leaderboard"),
- path("ploutos/files/", views.files, name="files"),
- path("ploutos/tars/", views.tars, name="tars"),
-]
diff --git a/Ploutos/nginx/Dockerfile b/Ploutos/nginx/Dockerfile
deleted file mode 100644
index 93b53f8..0000000
--- a/Ploutos/nginx/Dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-FROM nginx:1.23
-
-RUN apt-get -y update
-RUN apt-get -y install vim
-
-RUN rm /etc/nginx/conf.d/default.conf
-COPY nginx.conf /etc/nginx/conf.d
\ No newline at end of file
diff --git a/Ploutos/templates/registration/login.html b/Ploutos/templates/registration/login.html
deleted file mode 100644
index b6cff4b..0000000
--- a/Ploutos/templates/registration/login.html
+++ /dev/null
@@ -1,15 +0,0 @@
-{% extends 'base.html' %}
-
-{% block content %}
-{% load crispy_forms_tags %}
-
-{% endblock %}
\ No newline at end of file
diff --git a/Ploutos/Ploutos/__init__.py b/core/__init__.py
similarity index 100%
rename from Ploutos/Ploutos/__init__.py
rename to core/__init__.py
diff --git a/Ploutos/Ploutos/asgi.py b/core/asgi.py
similarity index 82%
rename from Ploutos/Ploutos/asgi.py
rename to core/asgi.py
index 6a03011..bee1dfd 100644
--- a/Ploutos/Ploutos/asgi.py
+++ b/core/asgi.py
@@ -11,6 +11,6 @@
from django.core.asgi import get_asgi_application
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Ploutos.settings")
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings")
application = get_asgi_application()
diff --git a/Ploutos/Ploutos/settings.py b/core/settings.py
similarity index 74%
rename from Ploutos/Ploutos/settings.py
rename to core/settings.py
index f10f2ed..e5f7d25 100644
--- a/Ploutos/Ploutos/settings.py
+++ b/core/settings.py
@@ -19,9 +19,27 @@
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
+PROJ_COLOUR_DICT = {
+ "001": "rgb(228,26,28)",
+ "002": "rgb(55,126,184)",
+ "003": "rgb(77,175,74)",
+ "004": "rgb(152,78,163)",
+}
+
+ASSAY_COLOUR_DICT = {
+ "CEN": "rgb(127, 60, 141)",
+ "MYE": "rgb(17, 165, 121)",
+ "TWE": "rgb(57, 105, 172)",
+ "TSO500": "rgb(242, 183, 1)",
+ "SNP": "rgb(231, 63, 116)",
+ "CP": "rgb(128, 186, 90)",
+ "WES": "rgb(230, 131, 16)",
+ "FH": "rgb(0, 134, 149)",
+}
+
# Assign keys to data from the file
try:
- # Database Env
+ # database
DB_USERNAME = os.environ["DB_USERNAME"]
DB_PASSWORD = os.environ["DB_PASSWORD"]
DB_NAME = os.environ["DB_NAME"]
@@ -46,65 +64,56 @@
LIVE_STORAGE_COST_MONTH = os.environ["LIVE_STORAGE_COST_MONTH"]
ARCHIVED_STORAGE_COST_MONTH = os.environ["ARCHIVED_STORAGE_COST_MONTH"]
- # Allowed Host
+ # allowed host
hosts = os.environ["HOSTS"]
ALLOWED_HOSTS = [host.strip() for host in hosts.split(",")]
# Debug Mode (default False)
DEBUG = os.environ.get("PLOUTOS_DEBUG", False)
- # Graph Plot Colour Env
- proj_dict = os.environ["PROJ_COLOUR_DICT"]
- PROJ_COLOUR_DICT = dict(
- [tuple(x.split("_")) for x in proj_dict.split("|")]
- )
-
- assay_dict = os.environ["ASSAY_COLOUR_DICT"]
- ASSAY_COLOUR_DICT = dict(
- [tuple(x.split("_")) for x in assay_dict.split("|")]
- )
-
- USER_COLOUR_DICT = {}
-
- # Form CSRF Stuff
+ # form CSRF
origins = os.environ["PLOUTOS_ORIGINS"]
CSRF_TRUSTED_ORIGINS = [origin.strip() for origin in origins.split(",")]
+ # running on local workstation or not
+ LOCALHOST = os.environ.get("LOCALHOST", False)
+
except KeyError as e:
- print(f"ERROR with importing {e}")
+ print(e)
LOGIN_URL = "/ploutos/accounts/login"
LOGIN_REDIRECT_URL = "/ploutos"
+# LOGOUT_REDIRECT_URL = "/ploutos/"
+
# Application definition
INSTALLED_APPS = [
"dashboard.apps.DashboardConfig",
"django.contrib.admin",
- "django.contrib.auth",
- "django.contrib.contenttypes",
+ "django.contrib.auth", # auth
+ "django.contrib.contenttypes", # auth
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django_extensions",
"crispy_bootstrap5",
"crispy_forms",
- "compressor",
"cachalot",
"debug_toolbar",
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
- "django.contrib.sessions.middleware.SessionMiddleware",
+ "django.contrib.sessions.middleware.SessionMiddleware", # auth
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
- "django.contrib.auth.middleware.AuthenticationMiddleware",
+ "django.contrib.auth.middleware.AuthenticationMiddleware", # auth
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
- "debug_toolbar.middleware.DebugToolbarMiddleware",
+ "debug_toolbar.middleware.DebugToolbarMiddleware", # debug toolbar
]
-ROOT_URLCONF = "Ploutos.urls"
+ROOT_URLCONF = "core.urls"
TEMPLATES = [
{
@@ -125,7 +134,7 @@
},
]
-WSGI_APPLICATION = "Ploutos.wsgi.application"
+WSGI_APPLICATION = "core.wsgi.application"
# Database
# https://docs.djangoproject.com/en/4.0/ref/settings/#databases
@@ -133,11 +142,11 @@
DATABASES = {
"default": {
"ENGINE": "django.db.backends.mysql",
- "NAME": DB_NAME,
- "USER": DB_USERNAME,
- "PASSWORD": DB_PASSWORD,
- "HOST": "db", # Edit this in local Runserver
- "PORT": DB_PORT,
+ "NAME": os.environ.get("DB_NAME"),
+ "USER": os.environ.get("DB_USERNAME"),
+ "PASSWORD": os.environ.get("DB_PASSWORD"),
+ "HOST": os.environ.get("DB_HOST", "localhost"), # NOTE: edit in localhost
+ "PORT": int(os.environ.get("DB_PORT", 3306)),
}
}
@@ -161,12 +170,7 @@
]
-# Internationalization
-# https://docs.djangoproject.com/en/4.0/topics/i18n/
-
LANGUAGE_CODE = "en-us"
-
-# TIME_ZONE = 'Europe/Berlin'
TIME_ZONE = "GMT"
USE_I18N = True
@@ -180,10 +184,9 @@
# Logging (Docker)
-LOGGING_PATHWAY = '/logs/'
-ERROR_LOG = "ploutos-error.log" # Edit in local Runserver
-DEBUG_LOG = "ploutos-debug.log" # Edit in local Runserver
-EXECUTION_LOG = "executions_log.log" # Edit in local Runserver
+ERROR_LOG = "/logs/ploutos-error.log" # NOTE: edit in localhost
+DEBUG_LOG = "/logs/ploutos-debug.log" # NOTE: edit in localhost
+EXECUTION_LOG = "/logs/executions_log.log" # NOTE: edit in localhost
with open(ERROR_LOG, "a+"):
pass
@@ -238,24 +241,16 @@
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/4.0/howto/static-files/
-STATIC_URL = "ploutos/static/"
+STATIC_URL = "static/" if LOCALHOST else "ploutos/static/"
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
-STATICFILES_DIRS = (os.path.join(BASE_DIR, "static"),)
+STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
+
# Default primary key field type
# https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
-INTERNAL_IPS = [
- # ...
- "127.0.0.1",
- # ...
-]
-
-LOGIN_REDIRECT_URL = "/"
-LOGOUT_REDIRECT_URL = "/"
-
# Log out when the browser is closed
# Also log user out after half an hour of inactivity
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
@@ -265,16 +260,21 @@
STATICFILES_FINDERS = (
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
- "compressor.finders.CompressorFinder",
)
-COMPRESS_ENABLED = True
-
-# CACHE SETTING
-# COMMENT OUT in local Runserver
-CACHES = {
- "default": {
- "BACKEND": "django.core.cache.backends.memcached.PyMemcacheCache",
- "LOCATION": "cache:11211",
+# Cache
+# NOTE: comment out in localhost
+CACHES = (
+ {
+ "default": {
+ "BACKEND": "django.core.cache.backends.memcached.PyMemcacheCache",
+ "LOCATION": "cache:11211",
+ }
}
-}
+ if not LOCALHOST
+ else {
+ "default": {
+ "BACKEND": "django.core.cache.backends.dummy.DummyCache",
+ }
+ }
+)
diff --git a/Ploutos/Ploutos/urls.py b/core/urls.py
similarity index 89%
rename from Ploutos/Ploutos/urls.py
rename to core/urls.py
index 82977ea..0333608 100644
--- a/Ploutos/Ploutos/urls.py
+++ b/core/urls.py
@@ -16,9 +16,10 @@
from django.contrib import admin
from django.urls import include, path
+from core.settings import LOCALHOST
urlpatterns = [
- path("", include("dashboard.urls")),
+ path("" if LOCALHOST else "ploutos/", include("dashboard.urls")),
path("ploutos/admin/", admin.site.urls),
path("ploutos/accounts/", include("django.contrib.auth.urls")),
# debug toolbar. only show during DEBUG True
diff --git a/Ploutos/Ploutos/wsgi.py b/core/wsgi.py
similarity index 82%
rename from Ploutos/Ploutos/wsgi.py
rename to core/wsgi.py
index 7a621f2..d93e33a 100644
--- a/Ploutos/Ploutos/wsgi.py
+++ b/core/wsgi.py
@@ -11,6 +11,6 @@
from django.core.wsgi import get_wsgi_application
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Ploutos.settings")
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings")
application = get_wsgi_application()
diff --git a/crontab b/crontab
index 504c0cd..cde0c4a 100644
--- a/crontab
+++ b/crontab
@@ -1,4 +1,4 @@
# start cron
-0 1 * * * /usr/local/bin/python /app/Ploutos/manage.py runscript populate_db >> /logs/ploutos-cron.log 2>&1
-0 * * * * /usr/local/bin/python /app/Ploutos/scripts/test_cron.py >> /logs/ploutos-cron.log 2>&1
+0 1 * * * /usr/local/bin/python /app/manage.py runscript populate_db >> /logs/ploutos-cron.log 2>&1
+0 * * * * /usr/local/bin/python /app/scripts/test_cron.py >> /logs/ploutos-cron.log 2>&1
# end cron
\ No newline at end of file
diff --git a/Ploutos/dashboard/__init__.py b/dashboard/__init__.py
similarity index 100%
rename from Ploutos/dashboard/__init__.py
rename to dashboard/__init__.py
diff --git a/Ploutos/dashboard/admin.py b/dashboard/admin.py
similarity index 100%
rename from Ploutos/dashboard/admin.py
rename to dashboard/admin.py
diff --git a/Ploutos/dashboard/apps.py b/dashboard/apps.py
similarity index 100%
rename from Ploutos/dashboard/apps.py
rename to dashboard/apps.py
diff --git a/Ploutos/dashboard/forms.py b/dashboard/forms.py
similarity index 92%
rename from Ploutos/dashboard/forms.py
rename to dashboard/forms.py
index 663df29..816801c 100644
--- a/Ploutos/dashboard/forms.py
+++ b/dashboard/forms.py
@@ -51,16 +51,12 @@ def months_and_years(months_years_present_query):
# Set as tuple choices
month_year_choices = (
(entry, converted_entry)
- for entry, converted_entry in zip(
- months_and_years_list, converted_entries
- )
+ for entry, converted_entry in zip(months_and_years_list, converted_entries)
)
month_year_choices_2 = (
(entry, converted_entry)
- for entry, converted_entry in zip(
- months_and_years_list, converted_entries
- )
+ for entry, converted_entry in zip(months_and_years_list, converted_entries)
)
return month_year_choices, month_year_choices_2
@@ -103,9 +99,7 @@ def clean(self):
if not start and not end:
self.add_error(None, error="Please enter dates")
if start and not end:
- self.add_error(
- "end", "If entering a start date please include an end date"
- )
+ self.add_error("end", "If entering a start date please include an end date")
if end and not start:
self.add_error(
@@ -125,9 +119,7 @@ def __init__(self, *args, **kwargs):
if DailyOrgRunningTotal.objects.exists():
first_date = str(
- DailyOrgRunningTotal.objects.order_by("date__date")
- .first()
- .date
+ DailyOrgRunningTotal.objects.order_by("date__date").first().date
)
self.fields["start"].widget.attrs["min"] = first_date
self.fields["end"].widget.attrs["min"] = first_date
@@ -272,9 +264,7 @@ def clean(self):
)
elif end < start:
# Check end month not before start
- raise ValidationError(
- "Please ensure start month is before end month"
- )
+ raise ValidationError("Please ensure start month is before end month")
elif (
project_type
and assay_type
@@ -314,9 +304,7 @@ def __init__(self, *args, **kwargs):
self.fields[
"project_type"
].help_text = "Filters project names using 'startswith'"
- self.fields[
- "assay_type"
- ].help_text = "Filters project names using 'endswith'"
+ self.fields["assay_type"].help_text = "Filters project names using 'endswith'"
self.helper = FormHelper()
self.helper.layout = Layout(
@@ -392,9 +380,7 @@ def clean(self):
# Check end month not before start
if end < start:
- raise ValidationError(
- "Please ensure start month is before end month"
- )
+ raise ValidationError("Please ensure start month is before end month")
return self.cleaned_data
@@ -425,9 +411,7 @@ def __init__(self, *args, **kwargs):
self.fields[
"project_type"
].help_text = "Filters project names using 'startswith'"
- self.fields[
- "assay_type"
- ].help_text = "Filters project names using 'endswith'"
+ self.fields["assay_type"].help_text = "Filters project names using 'endswith'"
self.helper = FormHelper()
self.helper.layout = Layout(
@@ -493,13 +477,15 @@ def clean(self):
start = self.cleaned_data["start"]
end = self.cleaned_data["end"]
- project_len = len([n for n in self.cleaned_data["project_type"].split(',') if n.strip()])
- user_len = len([n for n in self.cleaned_data['user_type'].split(',') if n.strip()])
+ project_len = len(
+ [n for n in self.cleaned_data["project_type"].split(",") if n.strip()]
+ )
+ user_len = len(
+ [n for n in self.cleaned_data["user_type"].split(",") if n.strip()]
+ )
if project_len > 5 or user_len > 5:
- raise ValidationError(
- "Maximum 5 for project type or user type"
- )
+ raise ValidationError("Maximum 5 for project type or user type")
# Check both start and end included
if start == "---" and end != "---":
@@ -514,9 +500,7 @@ def clean(self):
# Check end month not before start
if end < start:
- raise ValidationError(
- "Please ensure start month is before end month"
- )
+ raise ValidationError("Please ensure start month is before end month")
return self.cleaned_data
@@ -546,9 +530,7 @@ def __init__(self, *args, **kwargs):
self.fields[
"project_type"
].help_text = "Filters project names using 'startswith'"
- self.fields[
- "user_type"
- ].help_text = "Filters usernames using 'endswith'"
+ self.fields["user_type"].help_text = "Filters usernames using 'endswith'"
self.helper = FormHelper()
self.helper.layout = Layout(
@@ -627,14 +609,10 @@ def __init__(self, *args, **kwargs):
# Find earliest object in runningtotals by date + get date
# This is to set min date for datepicker + validate
if FileTypeDate.objects.exists():
- first_date = str(
- FileTypeDate.objects.order_by("date__date").first().date
- )
+ first_date = str(FileTypeDate.objects.order_by("date__date").first().date)
self.fields["date_to_filter"].widget.attrs["min"] = first_date
self.fields[
"project_type"
].help_text = "Filters project names using 'startswith'"
- self.fields[
- "assay_type"
- ].help_text = "Filters project names using 'endswith'"
+ self.fields["assay_type"].help_text = "Filters project names using 'endswith'"
diff --git a/Ploutos/dashboard/migrations/__init__.py b/dashboard/migrations/__init__.py
similarity index 100%
rename from Ploutos/dashboard/migrations/__init__.py
rename to dashboard/migrations/__init__.py
diff --git a/Ploutos/dashboard/models.py b/dashboard/models.py
similarity index 100%
rename from Ploutos/dashboard/models.py
rename to dashboard/models.py
diff --git a/dashboard/templates/dashboard/base.html b/dashboard/templates/dashboard/base.html
new file mode 100644
index 0000000..7b9fa04
--- /dev/null
+++ b/dashboard/templates/dashboard/base.html
@@ -0,0 +1,111 @@
+{% load static %}
+
+
+
+
+ Ploutos
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% block content %}{% endblock %}
+
+ {% block script %}{% endblock %}
+
+
+
\ No newline at end of file
diff --git a/Ploutos/dashboard/templates/dashboard/compute.html b/dashboard/templates/dashboard/compute.html
similarity index 100%
rename from Ploutos/dashboard/templates/dashboard/compute.html
rename to dashboard/templates/dashboard/compute.html
diff --git a/Ploutos/dashboard/templates/dashboard/files.html b/dashboard/templates/dashboard/files.html
similarity index 100%
rename from Ploutos/dashboard/templates/dashboard/files.html
rename to dashboard/templates/dashboard/files.html
diff --git a/Ploutos/dashboard/templates/dashboard/index.html b/dashboard/templates/dashboard/index.html
similarity index 100%
rename from Ploutos/dashboard/templates/dashboard/index.html
rename to dashboard/templates/dashboard/index.html
diff --git a/Ploutos/dashboard/templates/dashboard/leaderboard.html b/dashboard/templates/dashboard/leaderboard.html
similarity index 100%
rename from Ploutos/dashboard/templates/dashboard/leaderboard.html
rename to dashboard/templates/dashboard/leaderboard.html
diff --git a/Ploutos/dashboard/templates/dashboard/storage.html b/dashboard/templates/dashboard/storage.html
similarity index 100%
rename from Ploutos/dashboard/templates/dashboard/storage.html
rename to dashboard/templates/dashboard/storage.html
diff --git a/Ploutos/dashboard/templates/dashboard/tars.html b/dashboard/templates/dashboard/tars.html
similarity index 100%
rename from Ploutos/dashboard/templates/dashboard/tars.html
rename to dashboard/templates/dashboard/tars.html
diff --git a/Ploutos/dashboard/tests.py b/dashboard/tests.py
similarity index 100%
rename from Ploutos/dashboard/tests.py
rename to dashboard/tests.py
diff --git a/dashboard/urls.py b/dashboard/urls.py
new file mode 100644
index 0000000..4e92994
--- /dev/null
+++ b/dashboard/urls.py
@@ -0,0 +1,12 @@
+from django.urls import path
+from . import views
+
+
+urlpatterns = [
+ path("", views.index, name="index"),
+ path("storage/", views.storage_chart, name="storage"),
+ path("compute/", views.compute_graph, name="compute"),
+ path("leaderboard/", views.leaderboard, name="leaderboard"),
+ path("files/", views.files, name="files"),
+ path("tars/", views.tars, name="tars"),
+]
diff --git a/Ploutos/dashboard/views.py b/dashboard/views.py
similarity index 93%
rename from Ploutos/dashboard/views.py
rename to dashboard/views.py
index 6945443..fddca2a 100644
--- a/Ploutos/dashboard/views.py
+++ b/dashboard/views.py
@@ -45,7 +45,7 @@ def get_default_dates() -> Union[date, date]:
return default_start_date, default_end_date
-@login_required
+@login_required()
def index(request):
"""View to display running total charges via Plotly"""
@@ -75,9 +75,7 @@ def index(request):
start_year, start_month = monthly_form.cleaned_data.get(
"start_month"
).split("-")
- end_year, end_month = monthly_form.cleaned_data.get(
- "end_month"
- ).split("-")
+ end_year, end_month = monthly_form.cleaned_data.get("end_month").split("-")
start_date = date(int(start_year), int(start_month), 1)
end_date = date(
@@ -87,9 +85,7 @@ def index(request):
)
chart_data = rtp.daily_plot(start_date, end_date)
- monthly_chart_data = rtp.monthly_between_dates(
- start_date, end_date
- )
+ monthly_chart_data = rtp.monthly_between_dates(start_date, end_date)
else:
monthly_chart_data = rtp.monthly_between_dates(
default_start_date, default_end_date
@@ -136,16 +132,10 @@ def storage_chart(request):
start_date: str = form.cleaned_data.get("start")
end_date: str = form.cleaned_data.get("end")
project_type: list[str] = [
- x
- for x in form.cleaned_data.get("project_type")
- .strip()
- .split(",")
- if x
+ x for x in form.cleaned_data.get("project_type").strip().split(",") if x
]
assay_type: list[str] = [
- x
- for x in form.cleaned_data.get("assay_type").strip().split(",")
- if x
+ x for x in form.cleaned_data.get("assay_type").strip().split(",") if x
]
if start_date == "---" and end_date == "---":
@@ -190,9 +180,7 @@ def storage_chart(request):
)
else:
form = StorageForm()
- context = sp.all_projects_between_months(
- default_start_date, default_end_date
- )
+ context = sp.all_projects_between_months(default_start_date, default_end_date)
return render(
request,
@@ -242,9 +230,7 @@ def files(request):
]
if project_types and assay_types:
- context = fp.get_size(
- project_types, assay_types, date_to_filter
- )
+ context = fp.get_size(project_types, assay_types, date_to_filter)
elif project_types and not assay_types:
context = fp.get_size(
project_type=project_types, date_to_filter=date_to_filter
@@ -272,9 +258,7 @@ def files(request):
"form": form,
"live_total": live_total,
"archived_total": archived_total,
- "date_to_display": context.get(
- "date_to_filter", "No Date Returned"
- ),
+ "date_to_display": context.get("date_to_filter", "No Date Returned"),
"project_or_assay": context.get("project_or_assay", False),
"nbar": "files",
},
@@ -492,22 +476,16 @@ def leaderboard(request):
)
elif user_types and not project_types:
- context = upf.daily_filter_byusers(
- month_start, month_end, user_types
- )
+ context = upf.daily_filter_byusers(month_start, month_end, user_types)
else:
# months selected but display default currently.
- context = upf.default_daily_nofilter_allprojects(
- month_start, month_end
- )
+ context = upf.default_daily_nofilter_allprojects(month_start, month_end)
else:
logger.error(f"Invalid Form Input: {form.cleaned_data}")
- context = upf.Users_allprojects(
- default_start_date, default_end_date
- )
+ context = upf.Users_allprojects(default_start_date, default_end_date)
elif "MONTHLY" in request.POST:
form = LeaderboardForm(request.POST)
@@ -552,14 +530,10 @@ def leaderboard(request):
month_start, month_end, project_types
)
elif user_types and not project_types:
- context = upf.monthly_filter_byusers(
- month_start, month_end, user_types
- )
+ context = upf.monthly_filter_byusers(month_start, month_end, user_types)
else:
# monthly selected but display default currently.
- context = upf.monthly_nofilter_allprojects(
- month_start, month_end
- )
+ context = upf.monthly_nofilter_allprojects(month_start, month_end)
else:
logger.error(f"Invalid Form Input {form.cleaned_data}")
diff --git a/docker-compose.yml b/docker-compose.yml
index eebc0fc..32ed36f 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -3,35 +3,34 @@ services:
db:
container_name: ploutos-db
image: mysql:8
- environment:
- MYSQL_TCP_PORT: 3307 # change depending on PORT documentation
ports:
- - "3307:3307" # change depending on above
- env_file:
- - /home/jason/github/Ploutos/.env # config file directory
+ - "3307:3306" # change depending on above
+ environment:
+ - MYSQL_ROOT_PASSWORD=
+ - MYSQL_ROOT_HOST=
volumes:
- ploutos_db:/var/lib/mysql
web:
container_name: ploutos-web
image: ploutos-web:1.0-a
build: .
- command: bash -c "python Ploutos/manage.py collectstatic --noinput && gunicorn Ploutos.wsgi:application --chdir /app/Ploutos --bind :8003" # change PORT based on doc
+ command: bash -c "python manage.py collectstatic --noinput && gunicorn core.wsgi:application --chdir /app --bind :8003" # change PORT based on doc
expose:
- 8003 # change based on above
env_file:
- - /home/jason/github/Ploutos/.env # change config directory
+ - /Users/jason/Github/Ploutos/.env # change config directory
volumes:
- - /home/jason/github/Ploutos/Ploutos/logs:/logs
- - ploutos_static:/app/Ploutos/staticfiles
+ - /Users/jason/Github/Ploutos/env:/logs # change logging directory mount
+ - ploutos_static:/app/staticfiles
depends_on:
- db
- cache # this is necessary
nginx:
container_name: ploutos-nginx
- image: ploutos-nginx:1.0
- build: Ploutos/nginx
+ image: nginx:1.23
volumes:
- ploutos_static:/staticfiles
+ - /Users/jason/Github/Ploutos/nginx:/etc/nginx/conf.d
environment:
- VIRTUAL_HOST=localhost # change based on HOST
- VIRTUAL_PATH=/ploutos # change based on PATH
diff --git a/Ploutos/manage.py b/manage.py
similarity index 89%
rename from Ploutos/manage.py
rename to manage.py
index eaf4ea4..4e20ce5 100755
--- a/Ploutos/manage.py
+++ b/manage.py
@@ -6,7 +6,7 @@
def main():
"""Run administrative tasks."""
- os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Ploutos.settings")
+ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
diff --git a/Ploutos/nginx/nginx.conf b/nginx/nginx.conf
similarity index 93%
rename from Ploutos/nginx/nginx.conf
rename to nginx/nginx.conf
index e6e2f1c..786a7d8 100644
--- a/Ploutos/nginx/nginx.conf
+++ b/nginx/nginx.conf
@@ -13,7 +13,7 @@ server {
proxy_redirect off;
}
- # django static files
+ # django static files
location /ploutos/static/ {
autoindex on;
alias /staticfiles/;
diff --git a/requirements.txt b/requirements.txt
index 78114d3..1c86810 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -11,11 +11,10 @@ cryptography==36.0.2
Django==4.1.3
django-appconf==1.0.5
django-cachalot==2.5.2
-django-compressor==4.3.1
django-crispy-forms==1.14.0
django-crontab==0.7.1
-django-debug-toolbar==3.8.1
django-extensions==3.2.1
+django_debug_toolbar==3.8.1
dxpy==0.331.0
gunicorn==20.1.0
idna==3.4
@@ -42,6 +41,6 @@ six==1.16.0
sqlparse==0.4.3
tenacity==8.1.0
tomli==2.0.1
-typing-extensions==4.5.0
+typing_extensions==4.5.0
urllib3==1.26.12
websocket-client==0.54.0
diff --git a/Ploutos/scripts/DNAnexus_queries.py b/scripts/DNAnexus_queries.py
similarity index 94%
rename from Ploutos/scripts/DNAnexus_queries.py
rename to scripts/DNAnexus_queries.py
index 66b7e33..34bd728 100644
--- a/Ploutos/scripts/DNAnexus_queries.py
+++ b/scripts/DNAnexus_queries.py
@@ -178,9 +178,7 @@ def get_files(proj: str) -> dict:
dx.search.find_data_objects(
classname="file",
project=proj,
- describe={
- "fields": {"archivalState": True, "size": True, "name": True}
- },
+ describe={"fields": {"archivalState": True, "size": True, "name": True}},
)
)
@@ -431,16 +429,16 @@ def merge_files_and_proj_dfs(
# Replace the state 'archival' to live for easy grouping later
# As they're technically charged as live
- files_with_proj_created["state"] = files_with_proj_created[
- "state"
- ].str.replace("archival", "live")
+ files_with_proj_created["state"] = files_with_proj_created["state"].str.replace(
+ "archival", "live"
+ )
# Replace unarchiving with archived so adding missing rows works
# See - https://documentation.dnanexus.com/user/objects/archiving-files
# This explains the archiving process -
# unarchiving files are still currently archived.
- files_with_proj_created["state"] = files_with_proj_created[
- "state"
- ].str.replace("unarchiving", "archived")
+ files_with_proj_created["state"] = files_with_proj_created["state"].str.replace(
+ "unarchiving", "archived"
+ )
return files_with_proj_created
@@ -619,9 +617,7 @@ def add_missing_states_projects_file_types(
# With size and count as zero
states_filled_in = file_type_agg_df.set_index(["project", "state"])
states_filled_in = states_filled_in.reindex(
- index=pd.MultiIndex.from_product(
- iterables, names=["project", "state"]
- ),
+ index=pd.MultiIndex.from_product(iterables, names=["project", "state"]),
fill_value=0,
).reset_index()
@@ -629,9 +625,7 @@ def add_missing_states_projects_file_types(
# Which might only contain duplicates or not that file type
how_many_unique_projects = list(file_df.project.unique())
projects_left_in_this_df = list(states_filled_in.project.unique())
- empty_projs = list(
- set(how_many_unique_projects) - set(projects_left_in_this_df)
- )
+ empty_projs = list(set(how_many_unique_projects) - set(projects_left_in_this_df))
# Append two dicts for live and archived rows
# For the projects without that file type
@@ -792,9 +786,7 @@ def group_by_project_and_rename(
# Group by project and file state
# And sum the size column to get total size (with duplicates)
grouped_df = (
- df_name.groupby(["project", "state"])
- .agg(size=("size", "sum"))
- .reset_index()
+ df_name.groupby(["project", "state"]).agg(size=("size", "sum")).reset_index()
)
# Replace the values with unique_ as it makes it easier to merge later
@@ -862,9 +854,7 @@ def calculate_totals(
return my_grouped_df
-def merge_together_add_empty_rows(
- df1: pd.DataFrame, df2: pd.DataFrame
-) -> pd.DataFrame:
+def merge_together_add_empty_rows(df1: pd.DataFrame, df2: pd.DataFrame) -> pd.DataFrame:
"""
Merge two dataframes to make final dict and add zeros for file state
categories which don't exist
@@ -905,9 +895,7 @@ def merge_together_add_empty_rows(
]
total_merged_df = total_merged_df.set_index(["project", "state"])
total_merged_df = total_merged_df.reindex(
- index=pd.MultiIndex.from_product(
- iterables, names=["project", "state"]
- ),
+ index=pd.MultiIndex.from_product(iterables, names=["project", "state"]),
fill_value=0,
).reset_index()
@@ -997,9 +985,9 @@ def put_into_dict(final_all_projs_df: pd.DataFrame) -> dict:
"""
all_proj_dict = {
proj: state.loc[proj].to_dict("index")
- for proj, state in final_all_projs_df.set_index(
- ["project", "state"]
- ).groupby(level="project")
+ for proj, state in final_all_projs_df.set_index(["project", "state"]).groupby(
+ level="project"
+ )
}
return all_proj_dict
@@ -1074,9 +1062,7 @@ def get_executions(proj: str) -> dict:
r"[0-9]\.[0-9]\.[0-9]", executable_name
).group(0)
except Exception as e:
- logger.error(
- f"Error: {e}.\n" f"No app found: {job_exec}"
- )
+ logger.error(f"Error: {e}.\n" f"No app found: {job_exec}")
print(f"no app found {job_exec}")
version = ""
@@ -1099,14 +1085,10 @@ def get_executions(proj: str) -> dict:
executable_name = job["describe"]["executableName"]
# slow way of doing this by adding another request.
try:
- app_described = dx.api.app_describe(
- app_name_or_id=job_exec
- )
+ app_described = dx.api.app_describe(app_name_or_id=job_exec)
version = app_described["version"]
except Exception as e:
- logger.error(
- f"Error: {e}.\n" f"No app found: {job_exec}"
- )
+ logger.error(f"Error: {e}.\n" f"No app found: {job_exec}")
print(f"no app found {job_exec}")
version = ""
@@ -1137,9 +1119,7 @@ def get_executions(proj: str) -> dict:
except Exception as e:
logger.error(
f"Error: {e}.\n"
- "No app found: {}".format(
- job["describe"]["executable"]
- )
+ "No app found: {}".format(job["describe"]["executable"])
)
print(f"no app found {job['describe']['executable']}")
version = ""
@@ -1148,9 +1128,7 @@ def get_executions(proj: str) -> dict:
{
"id": job["id"],
"job_name": job["describe"]["name"],
- "executable_name": job["describe"][
- "executableName"
- ],
+ "executable_name": job["describe"]["executableName"],
"version": version,
"cost": job["describe"]["totalPrice"],
"class": job["describe"]["class"],
@@ -1159,9 +1137,7 @@ def get_executions(proj: str) -> dict:
"created": job["describe"]["created"],
"modified": job["describe"]["modified"],
"launchedBy": job["describe"]["launchedBy"],
- "createdBy": job["describe"]["workflow"][
- "createdBy"
- ],
+ "createdBy": job["describe"]["workflow"]["createdBy"],
"Stages": job["describe"]["stages"],
}
)
@@ -1169,9 +1145,7 @@ def get_executions(proj: str) -> dict:
logger.error(
"Error: New executable type found {}".format(job["id"])
)
- print(
- "Error: New executable type found {}".format(job["id"])
- )
+ print("Error: New executable type found {}".format(job["id"]))
return project_executions_dict
@@ -1421,8 +1395,7 @@ def get_subjobs_make_job_executions_df(
states = desc["stateTransitions"]
runtime_epoch = (
- desc["stoppedRunning"]
- - desc["startedRunning"]
+ desc["stoppedRunning"] - desc["startedRunning"]
)
desc["runtime"] = runtime_epoch
desc["start_epoch"] = subjob["describe"][
@@ -1433,15 +1406,11 @@ def get_subjobs_make_job_executions_df(
]
subjobs_list.append(subjob)
# Append data (dict) to list of executions.
- project_executions_dict[project_id][
- "executions"
- ].append(
+ project_executions_dict[project_id]["executions"].append(
{
"id": entry["id"],
"job_name": entry["job_name"],
- "executable_name": entry[
- "executable_name"
- ],
+ "executable_name": entry["executable_name"],
"version": entry["version"],
"cost": entry["cost"],
"class": entry["class"],
@@ -1454,9 +1423,7 @@ def get_subjobs_make_job_executions_df(
}
)
else:
- logger.info(
- f"{entry} TotalPrice not present - not charged"
- )
+ logger.info(f"{entry} TotalPrice not present - not charged")
continue
else:
logger.error(
@@ -1476,9 +1443,7 @@ def get_subjobs_make_job_executions_df(
if not df.empty:
# Find runtime for each execution.
df["Result"] = df["Executions"].apply(
- lambda x: sum(
- [int(value["describe"]["runtime"]) for value in x]
- )
+ lambda x: sum([int(value["describe"]["runtime"]) for value in x])
)
df["Runtime_min_epoch"] = df["Executions"].apply(
lambda x: min(
@@ -1488,10 +1453,7 @@ def get_subjobs_make_job_executions_df(
)
df["Runtime_max_epoch"] = df["Executions"].apply(
lambda x: max(
- [
- int(value["describe"]["finish_epoch"])
- for value in x
- ],
+ [int(value["describe"]["finish_epoch"]) for value in x],
default=0,
)
)
@@ -1499,9 +1461,7 @@ def get_subjobs_make_job_executions_df(
# convert to timedelta for storing as DurationField
df["Result_td"] = pd.to_timedelta(df["Result"], "ms")
- df["Start_to_finish_td"] = pd.to_timedelta(
- df["Runtime_epoch"], "ms"
- )
+ df["Start_to_finish_td"] = pd.to_timedelta(df["Runtime_epoch"], "ms")
appended_data.append(df)
# check if there is data in the appended_data list
@@ -1573,9 +1533,7 @@ def get_executions_from_list() -> dict:
elif job["class"] == "analysis":
proj = job["project"]
executable_Name = job["describe"]["executableName"]
- version = re.search(
- r"[0-9]\.[0-9]\.[0-9]", executable_Name
- ).group(0)
+ version = re.search(r"[0-9]\.[0-9]\.[0-9]", executable_Name).group(0)
project_executions_dict[proj]["executions"].append(
{
@@ -1613,9 +1571,7 @@ def get_executions_from_list() -> dict:
except Exception as e:
logger.error(e)
logger.error(
- "No app found: {}".format(
- job["describe"]["executable"]
- )
+ "No app found: {}".format(job["describe"]["executable"])
)
version = ""
project_executions_dict[proj]["executions"].append(
@@ -1743,14 +1699,10 @@ def orchestrate_get_files(
other_df = make_file_type_aggregate_df(unique_df, "others")
# basically adding the opposite if exist
- fastq_final = add_missing_states_projects_file_types(
- file_df, fastq_df, "fastq"
- )
+ fastq_final = add_missing_states_projects_file_types(file_df, fastq_df, "fastq")
vcf_final = add_missing_states_projects_file_types(file_df, vcf_df, "vcf")
bam_final = add_missing_states_projects_file_types(file_df, bam_df, "bam")
- other_final = add_missing_states_projects_file_types(
- file_df, other_df, "others"
- )
+ other_final = add_missing_states_projects_file_types(file_df, other_df, "others")
# merge everything (fastq, bam, vcf, others), their count and size
# and make a single row for each project-id
@@ -1766,9 +1718,7 @@ def orchestrate_get_files(
total_grouped_df = group_by_project_and_rename(merged_df, "total")
unique_sum_df = calculate_totals(unique_grouped_df, "unique")
total_sum_df = calculate_totals(total_grouped_df, "total")
- merged_total_df = merge_together_add_empty_rows(
- unique_sum_df, total_sum_df
- )
+ merged_total_df = merge_together_add_empty_rows(unique_sum_df, total_sum_df)
final_all_projs_df = add_empty_projs_back_in(empty_projs, merged_total_df)
final_dict = put_into_dict(final_all_projs_df)
diff --git a/Ploutos/scripts/__init__.py b/scripts/__init__.py
similarity index 100%
rename from Ploutos/scripts/__init__.py
rename to scripts/__init__.py
diff --git a/Ploutos/scripts/constants.py b/scripts/constants.py
similarity index 94%
rename from Ploutos/scripts/constants.py
rename to scripts/constants.py
index aa09f44..13c7028 100644
--- a/Ploutos/scripts/constants.py
+++ b/scripts/constants.py
@@ -109,9 +109,7 @@
"format": "{stack}",
},
},
- "setOptions": {
- "lang": {"thousandsSep": ",", "noData": "No data to display"}
- },
+ "setOptions": {"lang": {"thousandsSep": ",", "noData": "No data to display"}},
"plotOptions": {"column": {"stacking": "normal"}},
"series": "",
"tooltip": {
@@ -132,9 +130,7 @@
"min": 0,
"title": {"text": "Total costs ($)"},
},
- "setOptions": {
- "lang": {"thousandsSep": ",", "noData": "No data to display"}
- },
+ "setOptions": {"lang": {"thousandsSep": ",", "noData": "No data to display"}},
"plotOptions": {"column": {"stacking": "normal"}},
"series": [],
"tooltip": {"pointFormat": "{series.name}: ${point.y:.2f}"},
@@ -158,9 +154,7 @@
"format": "{stack}",
},
},
- "setOptions": {
- "lang": {"thousandsSep": ",", "noData": "No data to display"}
- },
+ "setOptions": {"lang": {"thousandsSep": ",", "noData": "No data to display"}},
"plotOptions": {"column": {"stacking": "normal"}},
"series": "",
"tooltip": {
@@ -227,9 +221,7 @@
"format": "{stack}",
},
},
- "setOptions": {
- "lang": {"thousandsSep": ",", "noData": "No data to display"}
- },
+ "setOptions": {"lang": {"thousandsSep": ",", "noData": "No data to display"}},
"plotOptions": {"column": {"stacking": "normal"}},
"exporting": {
"buttons": {
diff --git a/Ploutos/scripts/executions_plots.py b/scripts/executions_plots.py
similarity index 93%
rename from Ploutos/scripts/executions_plots.py
rename to scripts/executions_plots.py
index 368a4ed..87982b2 100644
--- a/Ploutos/scripts/executions_plots.py
+++ b/scripts/executions_plots.py
@@ -106,9 +106,7 @@ def daily_filtered_by(
series_data: list[dict] = []
project_types_list: list[str] = (
- [s.strip() for s in project_types.split(",")]
- if project_types
- else []
+ [s.strip() for s in project_types.split(",")] if project_types else []
)
assay_types_list: list[str] = (
[s.strip() for s in assay_types.split(",")] if assay_types else []
@@ -149,17 +147,12 @@ def daily_filtered_by(
for row in db_query:
r_date, cost = row
- detail_dict[f"{project}-{assay}"][str(r_date)] = round(
- cost, 2
- )
+ detail_dict[f"{project}-{assay}"][str(r_date)] = round(cost, 2)
# change datetime into unix format (recognizable for HighChart)
db_query = [
(
- datetime.combine(
- d, datetime.min.time()
- ).timestamp()
- * 1000,
+ datetime.combine(d, datetime.min.time()).timestamp() * 1000,
cost,
)
for d, cost in db_query
@@ -223,8 +216,7 @@ def daily_filtered_by(
# format for plot series data
db_query: list(tuple) = [
(
- datetime.combine(d, datetime.min.time()).timestamp()
- * 1000,
+ datetime.combine(d, datetime.min.time()).timestamp() * 1000,
cost,
)
for d, cost in db_query
@@ -286,8 +278,7 @@ def daily_filtered_by(
# change datetime into unix format
db_query = [
(
- datetime.combine(d, datetime.min.time()).timestamp()
- * 1000,
+ datetime.combine(d, datetime.min.time()).timestamp() * 1000,
cost,
)
for d, cost in db_query
@@ -298,9 +289,7 @@ def daily_filtered_by(
compute_data = {
"name": f"{assay}",
"data": db_query,
- "color": self.assay_colour_dict.get(
- assay, self.assay_colours[0]
- ),
+ "color": self.assay_colour_dict.get(assay, self.assay_colours[0]),
}
series_data.append(compute_data)
@@ -341,9 +330,7 @@ def daily_filtered_by(
# change datetime into unix format
db_query: list(tuple) = [
(
- (
- datetime.combine(d, datetime.min.time()) - EPOCH
- ).total_seconds()
+ (datetime.combine(d, datetime.min.time()) - EPOCH).total_seconds()
* 1000,
cost,
)
@@ -359,9 +346,7 @@ def daily_filtered_by(
for row in db_query:
detail_datatable.append(
[
- datetime.fromtimestamp(row[0] / 1000).strftime(
- "%Y-%m-%d"
- ),
+ datetime.fromtimestamp(row[0] / 1000).strftime("%Y-%m-%d"),
round(row[1], 2),
]
)
@@ -431,9 +416,7 @@ def monthly_filterby(
series_data: dict[str, list] = collections.defaultdict(list)
detail_datatable = []
- detail_dict = collections.defaultdict(
- lambda: collections.defaultdict(int)
- )
+ detail_dict = collections.defaultdict(lambda: collections.defaultdict(int))
# for each month [Dec 2022, Jan 2023, Feb 2023...]
# get aggregated ComputeCost SUM
@@ -461,18 +444,14 @@ def monthly_filterby(
f"{current_month.year}-{current_month.month}"
] = round(0.0)
else:
- series_data[pa_key].append(
- filtered_query["total_cost"]
- )
+ series_data[pa_key].append(filtered_query["total_cost"])
detail_dict[pa_key][
f"{current_month.year}-{current_month.month}"
] = round(filtered_query["total_cost"], 2)
# filter by project
elif project_types:
- detail_table_list = [
- f"{current_month.year}-{current_month.month}"
- ]
+ detail_table_list = [f"{current_month.year}-{current_month.month}"]
cols = project_types
@@ -490,16 +469,12 @@ def monthly_filterby(
detail_table_list.append(0.0)
else:
series_data[type].append(filtered_query["total_cost"])
- detail_table_list.append(
- round(filtered_query["total_cost"], 2)
- )
+ detail_table_list.append(round(filtered_query["total_cost"], 2))
detail_datatable.append(detail_table_list)
else:
- detail_table_list = [
- f"{current_month.year}-{current_month.month}"
- ]
+ detail_table_list = [f"{current_month.year}-{current_month.month}"]
cols = assay_types
# filter by assay
@@ -517,9 +492,7 @@ def monthly_filterby(
detail_table_list.append(0.0)
else:
series_data[assay].append(filtered_query["total_cost"])
- detail_table_list.append(
- round(filtered_query["total_cost"], 2)
- )
+ detail_table_list.append(round(filtered_query["total_cost"], 2))
detail_datatable.append(detail_table_list)
@@ -591,9 +564,7 @@ def default_daily_nofilter_allprojects(self) -> dict:
db_query = [
(
- (
- datetime.combine(d, datetime.min.time()) - EPOCH
- ).total_seconds()
+ (datetime.combine(d, datetime.min.time()) - EPOCH).total_seconds()
* 1000,
cost,
)
@@ -627,9 +598,7 @@ def default_daily_nofilter_allprojects(self) -> dict:
return context
- def monthly_nofilter_allprojects(
- self, month_start: str, month_end: str
- ) -> dict:
+ def monthly_nofilter_allprojects(self, month_start: str, month_end: str) -> dict:
"""
Sets context to all projects all months
when no project types or assay types selected.
@@ -652,9 +621,7 @@ def monthly_nofilter_allprojects(
"""
db_query = (
- ComputeCosts.objects.filter(
- date__date__range=[month_start, month_end]
- )
+ ComputeCosts.objects.filter(date__date__range=[month_start, month_end])
.values("date__date__month", "date__date__year")
.annotate(total_cost=Sum("total_cost"))
)
diff --git a/Ploutos/scripts/file_plots.py b/scripts/file_plots.py
similarity index 89%
rename from Ploutos/scripts/file_plots.py
rename to scripts/file_plots.py
index 5d095bf..7860835 100644
--- a/Ploutos/scripts/file_plots.py
+++ b/scripts/file_plots.py
@@ -102,13 +102,9 @@ def get_size(
# count the live size and archived size for diff file-type
data_store["live_size"] += item["file_state__file_size_live"]
- data_store["archived_size"] += item[
- "file_state__file_size_archived"
- ]
+ data_store["archived_size"] += item["file_state__file_size_archived"]
data_store["live_count"] += item["file_state__file_count_live"]
- data_store["archived_count"] += item[
- "file_state__file_count_archived"
- ]
+ data_store["archived_count"] += item["file_state__file_count_archived"]
data_store["file_type"] = file_type.upper()
@@ -194,9 +190,7 @@ def get_size(
xaxis = ["BAM", "FASTQ", "VCF", "Others"]
- elif (project_type and not assay_type) or (
- not project_type and assay_type
- ):
+ elif (project_type and not assay_type) or (not project_type and assay_type):
# either project filter or assay filter
detail_datatable_size = []
detail_datatable_count = []
@@ -247,29 +241,19 @@ def get_size(
data_store = others_size
# make project-level dict for project-level table
- project_level_details[item["project_id__name"]][
- file_type
- ] = {
+ project_level_details[item["project_id__name"]][file_type] = {
"live_size": item["file_state__file_size_live"],
- "archived_size": item[
- "file_state__file_size_archived"
- ],
+ "archived_size": item["file_state__file_size_archived"],
"live_count": item["file_state__file_count_live"],
- "archived_count": item[
- "file_state__file_count_archived"
- ],
+ "archived_count": item["file_state__file_count_archived"],
}
# count the live size and archived size for diff file-type
- data_store["live_size"] += item[
- "file_state__file_size_live"
- ]
+ data_store["live_size"] += item["file_state__file_size_live"]
data_store["archived_size"] += item[
"file_state__file_size_archived"
]
- data_store["live_count"] += item[
- "file_state__file_count_live"
- ]
+ data_store["live_count"] += item["file_state__file_count_live"]
data_store["archived_count"] += item[
"file_state__file_count_archived"
]
@@ -296,9 +280,7 @@ def get_size(
type, self.project_colours[count]
)
if project_type
- else self.assay_colour_dict.get(
- type, self.assay_colours[count]
- ),
+ else self.assay_colour_dict.get(type, self.assay_colours[count]),
}
archived_data = {
@@ -318,9 +300,7 @@ def get_size(
type, self.project_colours[count]
)
if project_type
- else self.assay_colour_dict.get(
- type, self.assay_colours[count]
- ),
+ else self.assay_colour_dict.get(type, self.assay_colours[count]),
"opacity": 0.8,
}
@@ -340,9 +320,7 @@ def get_size(
type, self.project_colours[count]
)
if project_type
- else self.assay_colour_dict.get(
- type, self.assay_colours[count]
- ),
+ else self.assay_colour_dict.get(type, self.assay_colours[count]),
}
archived_data = {
@@ -359,9 +337,7 @@ def get_size(
type, self.project_colours[count]
)
if project_type
- else self.assay_colour_dict.get(
- type, self.assay_colours[count]
- ),
+ else self.assay_colour_dict.get(type, self.assay_colours[count]),
"opacity": 0.8,
}
@@ -395,9 +371,7 @@ def get_size(
others_size["archived_count"],
]
- partial_count_datatable = [
- f"{d:,}" for d in partial_count_datatable
- ]
+ partial_count_datatable = [f"{d:,}" for d in partial_count_datatable]
partial_count_datatable.insert(0, type)
detail_datatable_size.append(partial_size_datatable)
@@ -439,29 +413,19 @@ def get_size(
for item in project_level_query:
# create a project level detail table later on
# we need each projects's file state
- project_level_details[item["project_id__name"]][
- file_type
- ] = {
+ project_level_details[item["project_id__name"]][file_type] = {
"live_size": item["file_state__file_size_live"],
- "archived_size": item[
- "file_state__file_size_archived"
- ],
+ "archived_size": item["file_state__file_size_archived"],
"live_count": item["file_state__file_count_live"],
- "archived_count": item[
- "file_state__file_count_archived"
- ],
+ "archived_count": item["file_state__file_count_archived"],
}
# adding up the size and count for file types
- data_store["live_size"] += item[
- "file_state__file_size_live"
- ]
+ data_store["live_size"] += item["file_state__file_size_live"]
data_store["archived_size"] += item[
"file_state__file_size_archived"
]
- data_store["live_count"] += item[
- "file_state__file_count_live"
- ]
+ data_store["live_count"] += item["file_state__file_count_live"]
data_store["archived_count"] += item[
"file_state__file_count_archived"
]
@@ -533,9 +497,7 @@ def get_size(
]
)
- if (project_type and assay_type) or (
- not project_type and not assay_type
- ):
+ if (project_type and assay_type) or (not project_type and not assay_type):
# both project and assay input or none
detail_datatable_size = [
bam_size["live_size"],
@@ -561,9 +523,7 @@ def get_size(
detail_datatable_size = [
[round(d / 1024, 2) for d in detail_datatable_size]
]
- detail_datatable_count = [
- [f"{d:,}" for d in detail_datatable_count]
- ]
+ detail_datatable_count = [[f"{d:,}" for d in detail_datatable_count]]
# this bool is needed in the FE for the cols header for detail
# datatable
diff --git a/Ploutos/scripts/leaderboard_plots.py b/scripts/leaderboard_plots.py
similarity index 95%
rename from Ploutos/scripts/leaderboard_plots.py
rename to scripts/leaderboard_plots.py
index b412b55..93a420e 100644
--- a/Ploutos/scripts/leaderboard_plots.py
+++ b/scripts/leaderboard_plots.py
@@ -41,7 +41,6 @@ def __init__(self) -> None:
# So don't change on diff numbers of types/assays during filtering
self.proj_colour_dict = settings.PROJ_COLOUR_DICT
self.assay_colour_dict = settings.ASSAY_COLOUR_DICT
- self.user_colour_dict = settings.USER_COLOUR_DICT
def daily_filter_byproject(
self, month_start: date, month_end: date, project_types: list
@@ -130,9 +129,7 @@ def daily_filter_byproject(
if cost_list:
xaxes.update(
(
- datetime.combine(
- row["date__date"], datetime.min.time()
- )
+ datetime.combine(row["date__date"], datetime.min.time())
- EPOCH
).total_seconds()
* 1000
@@ -142,9 +139,7 @@ def daily_filter_byproject(
date_cost = [
(
(
- datetime.combine(
- row["date__date"], datetime.min.time()
- )
+ datetime.combine(row["date__date"], datetime.min.time())
- EPOCH
).total_seconds()
* 1000,
@@ -159,9 +154,9 @@ def daily_filter_byproject(
if cost > 0.0:
date_cost_aggregate[str(cost_date)] += cost
- date_cost_breakdown_aggregate[
- str(cost_date)
- ].append(f"{user} {round(cost, 2)}")
+ date_cost_breakdown_aggregate[str(cost_date)].append(
+ f"{user} {round(cost, 2)}"
+ )
category_data_source.append(
{
@@ -238,9 +233,7 @@ def default_daily_nofilter_allprojects(
xaxes = sorted(
[
- (
- datetime.combine(d, datetime.min.time()) - EPOCH
- ).total_seconds()
+ (datetime.combine(d, datetime.min.time()) - EPOCH).total_seconds()
* 1000
for d in all_daily_dates
]
@@ -263,9 +256,7 @@ def default_daily_nofilter_allprojects(
user_daily_cost_dict[user].append(cost_spent)
if cost_spent > 0.0:
- daily_cost_user_breakdown[f"{day}"].append(
- f"{user} {cost_spent}"
- )
+ daily_cost_user_breakdown[f"{day}"].append(f"{user} {cost_spent}")
daily_cost += cost_spent
daily_cost_dict[f"{day}"] = daily_cost
@@ -297,9 +288,7 @@ def default_daily_nofilter_allprojects(
]
)
- project_datatable = self.get_project_level_details(
- month_start, month_end
- )
+ project_datatable = self.get_project_level_details(month_start, month_end)
context = {
"plot_series": plot_series,
@@ -361,10 +350,7 @@ def daily_filter_byusers(self, month_start, month_end, user_types):
if cost_list:
xaxes.update(
(
- datetime.combine(
- row["date__date"], datetime.min.time()
- )
- - EPOCH
+ datetime.combine(row["date__date"], datetime.min.time()) - EPOCH
).total_seconds()
* 1000
for row in cost_list
@@ -373,9 +359,7 @@ def daily_filter_byusers(self, month_start, month_end, user_types):
date_cost = [
(
(
- datetime.combine(
- row["date__date"], datetime.min.time()
- )
+ datetime.combine(row["date__date"], datetime.min.time())
- EPOCH
).total_seconds()
* 1000,
@@ -520,9 +504,7 @@ def daily_filter_byusers_and_projects(
xaxes.update(
[
(
- datetime.combine(
- row["date__date"], datetime.min.time()
- )
+ datetime.combine(row["date__date"], datetime.min.time())
- EPOCH
).total_seconds()
* 1000
@@ -533,9 +515,7 @@ def daily_filter_byusers_and_projects(
date_cost = [
(
(
- datetime.combine(
- row["date__date"], datetime.min.time()
- )
+ datetime.combine(row["date__date"], datetime.min.time())
- EPOCH
).total_seconds()
* 1000,
@@ -550,9 +530,9 @@ def daily_filter_byusers_and_projects(
if cost > 0.0:
date_cost_aggregate[str(cost_date)] += cost
- date_cost_breakdown_aggregate[
- str(cost_date)
- ].append(f"{user} {round(cost, 2)}")
+ date_cost_breakdown_aggregate[str(cost_date)].append(
+ f"{user} {round(cost, 2)}"
+ )
random_color = generate_random_rgb_color()
while random_color in color_dedup:
@@ -635,9 +615,7 @@ def Users_allprojects(self, month_start: str, month_end: str) -> dict:
detail_datatable = []
- for row in sorted(
- db_query, key=lambda x: x["total_cost__sum"], reverse=True
- ):
+ for row in sorted(db_query, key=lambda x: x["total_cost__sum"], reverse=True):
detail_datatable.append(
[
row["launched_by__user_name"],
@@ -655,9 +633,7 @@ def Users_allprojects(self, month_start: str, month_end: str) -> dict:
}
]
- project_datatable = self.get_project_level_details(
- month_start, month_end
- )
+ project_datatable = self.get_project_level_details(month_start, month_end)
context = {
"plot_series": category_data_source,
@@ -721,9 +697,7 @@ def monthly_filter_byproject(self, month_start, month_end, project_types):
user_costs: list(tuple) = []
while current_month <= month_end:
- current_month_end_date = current_month + relativedelta(
- months=1
- )
+ current_month_end_date = current_month + relativedelta(months=1)
if current_month_end_date > month_end:
current_month_end_date = month_end
@@ -986,9 +960,7 @@ def monthly_nofilter_allprojects(self, month_start, month_end):
"total_cost",
)
- list_of_users = set(
- row["launched_by_id__user_name"] for row in db_query
- )
+ list_of_users = set(row["launched_by_id__user_name"] for row in db_query)
db_min_date = db_query.first()
min_date = date(
@@ -1029,9 +1001,7 @@ def monthly_nofilter_allprojects(self, month_start, month_end):
if filtered_query:
total_cost = sum(r["total_cost"] for r in filtered_query)
for row in filtered_query:
- detail_datadict[str(current_month)] += row[
- "total_cost"
- ]
+ detail_datadict[str(current_month)] += row["total_cost"]
cost_data.append(total_cost)
else:
@@ -1048,9 +1018,7 @@ def monthly_nofilter_allprojects(self, month_start, month_end):
for k, v in detail_datadict.items()
]
- project_datatable = self.get_project_level_details(
- month_start, month_end
- )
+ project_datatable = self.get_project_level_details(month_start, month_end)
context = {
"plot_series": category_data_source,
@@ -1174,9 +1142,7 @@ def top_most_costly_job(self, month_start, month_end) -> Union[str, str]:
if not ComputeCosts.objects.exists():
return "Not Determined", "0"
User_totals = (
- ComputeCosts.objects.filter(
- date__date__range=[month_start, month_end]
- )
+ ComputeCosts.objects.filter(date__date__range=[month_start, month_end])
.values("launched_by__user_name")
.annotate(Cost=Sum("total_cost"))
.order_by("-Cost")[0]
diff --git a/Ploutos/scripts/populate_db.py b/scripts/populate_db.py
similarity index 96%
rename from Ploutos/scripts/populate_db.py
rename to scripts/populate_db.py
index 0b312de..cb34e05 100644
--- a/Ploutos/scripts/populate_db.py
+++ b/scripts/populate_db.py
@@ -34,7 +34,7 @@ def populate_projects(all_projects: dict) -> None:
all_projects : collections.defaultdict
dict with project as key and relevant file info
"""
- print('populate projects in db')
+ print("populate projects in db")
# Iterate over list of project dicts
for entry in all_projects:
# Add users to users table to create IDs
@@ -80,7 +80,7 @@ def populate_running_totals() -> None:
getting the date IDs or creating them first.
"""
- print('populate running totals in db')
+ print("populate running totals in db")
# Get today's date in YYY-MM-DD format
today_date = queries.no_of_days_in_month()[0]
@@ -204,9 +204,7 @@ def populate_file_types(file_type_df) -> None:
)
# Do same for Others
- others_file_type, _ = FileTypes.objects.get_or_create(
- file_type="others"
- )
+ others_file_type, _ = FileTypes.objects.get_or_create(file_type="others")
others_state, _ = FileTypeState.objects.get_or_create(
file_type=others_file_type,
@@ -319,9 +317,9 @@ def populate_tars_table() -> None:
today_date, _ = queries.no_of_days_in_month()
for project in data:
- created_on = dt.datetime.fromtimestamp(
- project["created"] / 1000
- ).strftime("%Y-%m-%d")
+ created_on = dt.datetime.fromtimestamp(project["created"] / 1000).strftime(
+ "%Y-%m-%d"
+ )
created_date, _ = Dates.objects.get_or_create(
date=created_on,
@@ -357,9 +355,7 @@ def run():
populate_running_totals()
- final_dict, file_type_df = queries.orchestrate_get_files(
- proj_list, proj_df
- )
+ final_dict, file_type_df = queries.orchestrate_get_files(proj_list, proj_df)
# populate StorageCost
populate_database_files(final_dict)
populate_file_types(file_type_df)
diff --git a/Ploutos/scripts/running_total_plots.py b/scripts/running_total_plots.py
similarity index 96%
rename from Ploutos/scripts/running_total_plots.py
rename to scripts/running_total_plots.py
index 1a9acd6..03de86b 100644
--- a/Ploutos/scripts/running_total_plots.py
+++ b/scripts/running_total_plots.py
@@ -24,9 +24,7 @@ def calculate_diffs(self, tuple_of_charges):
charge_diff : list
list with charge differences from the previous day
"""
- charge_diff = [
- y - x for x, y in zip(tuple_of_charges, tuple_of_charges[1:])
- ]
+ charge_diff = [y - x for x, y in zip(tuple_of_charges, tuple_of_charges[1:])]
return charge_diff
@@ -79,8 +77,7 @@ def daily_plot(self, start_date: date, end_date: date) -> dict:
epoch = datetime.utcfromtimestamp(0)
date_xaxis = [
- (datetime.combine(d, datetime.min.time()) - epoch).total_seconds()
- * 1000
+ (datetime.combine(d, datetime.min.time()) - epoch).total_seconds() * 1000
for d in dates
]
@@ -225,9 +222,7 @@ def monthly_between_dates(self, start_date: date, end_date: date) -> dict:
# if it's the last month e.g. Feb 2023 and no Mar 2023 data
# calculate usage by MAX of Feb - MIN of Feb
while current_date < end_date:
- current_year_month_key = (
- f"{current_date.month}-{current_date.year}"
- )
+ current_year_month_key = f"{current_date.month}-{current_date.year}"
xaxis.append(current_date)
if (current_date.month == end_date.month) and (
@@ -265,9 +260,7 @@ def monthly_between_dates(self, start_date: date, end_date: date) -> dict:
current_date += relativedelta(months=1)
# plot x-axis
- human_readable_xaxis = [
- month.strftime("%b-%Y") for month in sorted(xaxis)
- ]
+ human_readable_xaxis = [month.strftime("%b-%Y") for month in sorted(xaxis)]
sortable_xaxis = [month.strftime("%Y-%m") for month in sorted(xaxis)]
# round up charges
diff --git a/Ploutos/scripts/slack.py b/scripts/slack.py
similarity index 100%
rename from Ploutos/scripts/slack.py
rename to scripts/slack.py
diff --git a/Ploutos/scripts/storage_plots.py b/scripts/storage_plots.py
similarity index 90%
rename from Ploutos/scripts/storage_plots.py
rename to scripts/storage_plots.py
index 42058fb..761ac53 100644
--- a/Ploutos/scripts/storage_plots.py
+++ b/scripts/storage_plots.py
@@ -44,13 +44,9 @@ def get_latest_total_unique_size(self):
either total size in TiB of all the archived files in DNAnexus
or "Not yet calculated" if script not run today
"""
- latest_date = str(
- StorageCosts.objects.order_by("date__date").last().date
- )
+ latest_date = str(StorageCosts.objects.order_by("date__date").last().date)
- latest_total = StorageCosts.objects.filter(
- date__date=latest_date
- ).aggregate(
+ latest_total = StorageCosts.objects.filter(date__date=latest_date).aggregate(
total_live=Sum("unique_size_live"),
total_archived=Sum("unique_size_archived"),
)
@@ -215,9 +211,7 @@ def month_range_assay_type_and_proj_type(
plot_series = [
{
"name": f"{project_type}*{assay_type}",
- "data": [
- round(v["live_cost"], 2) for _, v in plot_data.items()
- ],
+ "data": [round(v["live_cost"], 2) for _, v in plot_data.items()],
"stack": "Live",
"color": self.proj_colour_dict.get(
project_type, self.project_colours[0]
@@ -225,9 +219,7 @@ def month_range_assay_type_and_proj_type(
},
{
"name": f"{project_type}*{assay_type}",
- "data": [
- round(v["archived_cost"], 2) for _, v in plot_data.items()
- ],
+ "data": [round(v["archived_cost"], 2) for _, v in plot_data.items()],
"stack": "Archived",
"linkedTo": ":previous",
"color": self.proj_colour_dict.get(
@@ -313,9 +305,7 @@ def month_range_only_project_types(
# # Get bar colour from dict or iterate over project_colours
live_data = {
"name": f"{proj_type}",
- "data": [
- round(v["live_cost"], 2) for _, v in plot_data.items()
- ],
+ "data": [round(v["live_cost"], 2) for _, v in plot_data.items()],
"stack": "Live",
"color": self.proj_colour_dict.get(
proj_type, self.project_colours[count]
@@ -324,9 +314,7 @@ def month_range_only_project_types(
archived_data = {
"name": f"{proj_type}",
- "data": [
- round(v["archived_cost"], 2) for _, v in plot_data.items()
- ],
+ "data": [round(v["archived_cost"], 2) for _, v in plot_data.items()],
"stack": "Archived",
"linkedTo": ":previous",
"color": self.proj_colour_dict.get(
@@ -420,25 +408,17 @@ def month_range_only_assay_types(
# # Get bar colour from dict or iterate over project_colours
live_data = {
"name": f"{assay}",
- "data": [
- round(v["live_cost"], 2) for _, v in plot_data.items()
- ],
+ "data": [round(v["live_cost"], 2) for _, v in plot_data.items()],
"stack": "Live",
- "color": self.assay_colour_dict.get(
- assay, self.assay_colours[count]
- ),
+ "color": self.assay_colour_dict.get(assay, self.assay_colours[count]),
}
archived_data = {
"name": f"{assay}",
- "data": [
- round(v["archived_cost"], 2) for _, v in plot_data.items()
- ],
+ "data": [round(v["archived_cost"], 2) for _, v in plot_data.items()],
"stack": "Archived",
"linkedTo": ":previous",
- "color": self.assay_colour_dict.get(
- assay, self.assay_colours[count]
- ),
+ "color": self.assay_colour_dict.get(assay, self.assay_colours[count]),
"opacity": 0.8,
}
@@ -464,9 +444,7 @@ def month_range_only_assay_types(
return context
- def all_projects_between_months(
- self, month_start: date, month_end: date
- ) -> dict:
+ def all_projects_between_months(self, month_start: date, month_end: date) -> dict:
"""
Sets context for all projects between certain months
This is either specified by user or by default this
@@ -491,9 +469,7 @@ def all_projects_between_months(
'proj_level_df': pd.Dataframe the live + archived costs per project
"""
db_query = (
- StorageCosts.objects.filter(
- date__date__range=[month_start, month_end]
- )
+ StorageCosts.objects.filter(date__date__range=[month_start, month_end])
.order_by("date__date")
.values("project__name", "date__date")
.annotate(
@@ -512,17 +488,13 @@ def all_projects_between_months(
plot_series = [
{
"name": "All projects",
- "data": [
- round(v["live_cost"], 2) for _, v in plot_data.items()
- ],
+ "data": [round(v["live_cost"], 2) for _, v in plot_data.items()],
"stack": "Live",
"color": "rgb(27,158,119)",
},
{
"name": "All projects",
- "data": [
- round(v["archived_cost"], 2) for _, v in plot_data.items()
- ],
+ "data": [round(v["archived_cost"], 2) for _, v in plot_data.items()],
"stack": "Archived",
"linkedTo": ":previous",
"color": "rgb(27,158,119)",
diff --git a/Ploutos/scripts/test_cron.py b/scripts/test_cron.py
similarity index 100%
rename from Ploutos/scripts/test_cron.py
rename to scripts/test_cron.py
diff --git a/Ploutos/scripts/utils.py b/scripts/utils.py
similarity index 96%
rename from Ploutos/scripts/utils.py
rename to scripts/utils.py
index c9ac10a..9849376 100644
--- a/Ploutos/scripts/utils.py
+++ b/scripts/utils.py
@@ -93,6 +93,4 @@ def generate_random_rgb_color() -> str:
"""
This function output random rgb(aaa, bbb, ccc) when called
"""
- return "RGB({}, {}, {})".format(
- randint(0, 255), randint(0, 255), randint(0, 255)
- )
+ return "RGB({}, {}, {})".format(randint(0, 255), randint(0, 255), randint(0, 255))
diff --git a/Ploutos/static/css/bootstrap.min.css b/static/css/bootstrap.min.css
similarity index 100%
rename from Ploutos/static/css/bootstrap.min.css
rename to static/css/bootstrap.min.css
diff --git a/Ploutos/static/css/bootstrap.min.css.map b/static/css/bootstrap.min.css.map
similarity index 100%
rename from Ploutos/static/css/bootstrap.min.css.map
rename to static/css/bootstrap.min.css.map
diff --git a/Ploutos/static/css/style.css b/static/css/style.css
similarity index 100%
rename from Ploutos/static/css/style.css
rename to static/css/style.css
diff --git a/Ploutos/static/datatables/buttons.bootstrap5.min.css b/static/datatables/buttons.bootstrap5.min.css
similarity index 100%
rename from Ploutos/static/datatables/buttons.bootstrap5.min.css
rename to static/datatables/buttons.bootstrap5.min.css
diff --git a/Ploutos/static/datatables/buttons.bootstrap5.min.js b/static/datatables/buttons.bootstrap5.min.js
similarity index 100%
rename from Ploutos/static/datatables/buttons.bootstrap5.min.js
rename to static/datatables/buttons.bootstrap5.min.js
diff --git a/Ploutos/static/datatables/buttons.html5.min.js b/static/datatables/buttons.html5.min.js
similarity index 100%
rename from Ploutos/static/datatables/buttons.html5.min.js
rename to static/datatables/buttons.html5.min.js
diff --git a/Ploutos/static/datatables/colReorder.bootstrap5.min.css b/static/datatables/colReorder.bootstrap5.min.css
similarity index 100%
rename from Ploutos/static/datatables/colReorder.bootstrap5.min.css
rename to static/datatables/colReorder.bootstrap5.min.css
diff --git a/Ploutos/static/datatables/dataTables.bootstrap5.min.css b/static/datatables/dataTables.bootstrap5.min.css
similarity index 100%
rename from Ploutos/static/datatables/dataTables.bootstrap5.min.css
rename to static/datatables/dataTables.bootstrap5.min.css
diff --git a/Ploutos/static/datatables/dataTables.bootstrap5.min.js b/static/datatables/dataTables.bootstrap5.min.js
similarity index 100%
rename from Ploutos/static/datatables/dataTables.bootstrap5.min.js
rename to static/datatables/dataTables.bootstrap5.min.js
diff --git a/Ploutos/static/datatables/dataTables.buttons.min.js b/static/datatables/dataTables.buttons.min.js
similarity index 100%
rename from Ploutos/static/datatables/dataTables.buttons.min.js
rename to static/datatables/dataTables.buttons.min.js
diff --git a/Ploutos/static/datatables/dataTables.colReorder.min.js b/static/datatables/dataTables.colReorder.min.js
similarity index 100%
rename from Ploutos/static/datatables/dataTables.colReorder.min.js
rename to static/datatables/dataTables.colReorder.min.js
diff --git a/Ploutos/static/datatables/dataTables.fixedHeader.min.js b/static/datatables/dataTables.fixedHeader.min.js
similarity index 100%
rename from Ploutos/static/datatables/dataTables.fixedHeader.min.js
rename to static/datatables/dataTables.fixedHeader.min.js
diff --git a/Ploutos/static/datatables/dataTables.hyperLink.min.js b/static/datatables/dataTables.hyperLink.min.js
similarity index 100%
rename from Ploutos/static/datatables/dataTables.hyperLink.min.js
rename to static/datatables/dataTables.hyperLink.min.js
diff --git a/Ploutos/static/datatables/dataTables.responsive.min.js b/static/datatables/dataTables.responsive.min.js
similarity index 100%
rename from Ploutos/static/datatables/dataTables.responsive.min.js
rename to static/datatables/dataTables.responsive.min.js
diff --git a/Ploutos/static/datatables/dataTables.scroller.min.js b/static/datatables/dataTables.scroller.min.js
similarity index 100%
rename from Ploutos/static/datatables/dataTables.scroller.min.js
rename to static/datatables/dataTables.scroller.min.js
diff --git a/Ploutos/static/datatables/fixedHeader.bootstrap5.min.css b/static/datatables/fixedHeader.bootstrap5.min.css
similarity index 100%
rename from Ploutos/static/datatables/fixedHeader.bootstrap5.min.css
rename to static/datatables/fixedHeader.bootstrap5.min.css
diff --git a/Ploutos/static/datatables/jquery.dataTables.min.js b/static/datatables/jquery.dataTables.min.js
similarity index 100%
rename from Ploutos/static/datatables/jquery.dataTables.min.js
rename to static/datatables/jquery.dataTables.min.js
diff --git a/Ploutos/static/datatables/jszip.min.js b/static/datatables/jszip.min.js
similarity index 100%
rename from Ploutos/static/datatables/jszip.min.js
rename to static/datatables/jszip.min.js
diff --git a/Ploutos/static/datatables/responsive.bootstrap5.min.css b/static/datatables/responsive.bootstrap5.min.css
similarity index 100%
rename from Ploutos/static/datatables/responsive.bootstrap5.min.css
rename to static/datatables/responsive.bootstrap5.min.css
diff --git a/Ploutos/static/datatables/responsive.bootstrap5.min.js b/static/datatables/responsive.bootstrap5.min.js
similarity index 100%
rename from Ploutos/static/datatables/responsive.bootstrap5.min.js
rename to static/datatables/responsive.bootstrap5.min.js
diff --git a/Ploutos/static/datatables/scroller.bootstrap5.min.css b/static/datatables/scroller.bootstrap5.min.css
similarity index 100%
rename from Ploutos/static/datatables/scroller.bootstrap5.min.css
rename to static/datatables/scroller.bootstrap5.min.css
diff --git a/Ploutos/static/favicon.png b/static/favicon.png
similarity index 100%
rename from Ploutos/static/favicon.png
rename to static/favicon.png
diff --git a/Ploutos/static/fonts/NunitoSans-Black.ttf b/static/fonts/NunitoSans-Black.ttf
similarity index 100%
rename from Ploutos/static/fonts/NunitoSans-Black.ttf
rename to static/fonts/NunitoSans-Black.ttf
diff --git a/Ploutos/static/fonts/Roboto-Medium.ttf b/static/fonts/Roboto-Medium.ttf
similarity index 100%
rename from Ploutos/static/fonts/Roboto-Medium.ttf
rename to static/fonts/Roboto-Medium.ttf
diff --git a/Ploutos/static/js/accessibility.js b/static/js/accessibility.js
similarity index 100%
rename from Ploutos/static/js/accessibility.js
rename to static/js/accessibility.js
diff --git a/Ploutos/static/js/accessibility.min.js b/static/js/accessibility.min.js
similarity index 100%
rename from Ploutos/static/js/accessibility.min.js
rename to static/js/accessibility.min.js
diff --git a/Ploutos/static/js/bootstrap.min.js b/static/js/bootstrap.min.js
similarity index 100%
rename from Ploutos/static/js/bootstrap.min.js
rename to static/js/bootstrap.min.js
diff --git a/Ploutos/static/js/bootstrap.min.js.map b/static/js/bootstrap.min.js.map
similarity index 100%
rename from Ploutos/static/js/bootstrap.min.js.map
rename to static/js/bootstrap.min.js.map
diff --git a/Ploutos/static/js/export-data.js b/static/js/export-data.js
similarity index 100%
rename from Ploutos/static/js/export-data.js
rename to static/js/export-data.js
diff --git a/Ploutos/static/js/export-data.min.js b/static/js/export-data.min.js
similarity index 100%
rename from Ploutos/static/js/export-data.min.js
rename to static/js/export-data.min.js
diff --git a/Ploutos/static/js/exporting.js b/static/js/exporting.js
similarity index 100%
rename from Ploutos/static/js/exporting.js
rename to static/js/exporting.js
diff --git a/Ploutos/static/js/exporting.min.js b/static/js/exporting.min.js
similarity index 100%
rename from Ploutos/static/js/exporting.min.js
rename to static/js/exporting.min.js
diff --git a/Ploutos/static/js/highcharts.min.js b/static/js/highcharts.min.js
similarity index 100%
rename from Ploutos/static/js/highcharts.min.js
rename to static/js/highcharts.min.js
diff --git a/Ploutos/static/js/jquery-3.6.0.min.js b/static/js/jquery-3.6.0.min.js
similarity index 100%
rename from Ploutos/static/js/jquery-3.6.0.min.js
rename to static/js/jquery-3.6.0.min.js
diff --git a/Ploutos/static/js/no-data-to-display.js b/static/js/no-data-to-display.js
similarity index 100%
rename from Ploutos/static/js/no-data-to-display.js
rename to static/js/no-data-to-display.js
diff --git a/Ploutos/static/js/no-data-to-display.min.js b/static/js/no-data-to-display.min.js
similarity index 100%
rename from Ploutos/static/js/no-data-to-display.min.js
rename to static/js/no-data-to-display.min.js
diff --git a/Ploutos/static/svg/bank2.svg b/static/svg/bank2.svg
similarity index 100%
rename from Ploutos/static/svg/bank2.svg
rename to static/svg/bank2.svg
diff --git a/Ploutos/static/svg/cloud-arrow-up.svg b/static/svg/cloud-arrow-up.svg
similarity index 100%
rename from Ploutos/static/svg/cloud-arrow-up.svg
rename to static/svg/cloud-arrow-up.svg
diff --git a/Ploutos/static/svg/file-earmark-bar-graph.svg b/static/svg/file-earmark-bar-graph.svg
similarity index 100%
rename from Ploutos/static/svg/file-earmark-bar-graph.svg
rename to static/svg/file-earmark-bar-graph.svg
diff --git a/Ploutos/static/svg/piggy-bank.svg b/static/svg/piggy-bank.svg
similarity index 100%
rename from Ploutos/static/svg/piggy-bank.svg
rename to static/svg/piggy-bank.svg
diff --git a/templates/registration/login.html b/templates/registration/login.html
new file mode 100644
index 0000000..9d3c6c1
--- /dev/null
+++ b/templates/registration/login.html
@@ -0,0 +1,15 @@
+{% extends 'base.html' %}
+
+{% block content %}
+{% load crispy_forms_tags %}
+
+{% endblock %}
\ No newline at end of file