Merge pull request #132 from amor71/anchor-vwap
Anchor vwap
amor71 authored Nov 21, 2020
2 parents d311345 + d2ed03a commit ad6d794
Showing 15 changed files with 492 additions and 49 deletions.
4 changes: 2 additions & 2 deletions .gitignore
@@ -7,14 +7,14 @@
/legacy/
/tools/set_keys.bash
/tools/fix_git_email.bash
/analyis/.ipynb_checkpoints/
/analysis/.ipynb_checkpoints/
/liualgotrader/__pycache__/
/tests/__pycache__/
/.idea/
/liualgotrader/scanners/__pycache__/
/liualgotrader/common/__pycache__/
/docs/build/
/analyis/notebooks/.ipynb_checkpoints
/analysis/notebooks/.ipynb_checkpoints

# Byte-compiled / optimized / DLL files
__pycache__/
File renamed without changes.
File renamed without changes.
@@ -13,7 +13,7 @@
"metadata": {},
"outputs": [],
"source": [
"batch_id = \"8048cc68-9e69-490c-8e3b-c977453749f8\""
"batch_id = \"5ebd4bf8-728f-405c-b8f5-d18a280b98d9\""
]
},
{
File renamed without changes.
326 changes: 326 additions & 0 deletions analysis/notebooks/distributions.ipynb
@@ -0,0 +1,326 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Take Action: Select schedule to analyze, and environment"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"start_date = \"2020-11-2\"\n",
"end_date = None # = now()!\n",
"env = \"PAPER\" # values may be PAPER / PROD / BACKTEST"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Imports"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import asyncio\n",
"import json\n",
"import math\n",
"import sys\n",
"from datetime import date, datetime, timedelta\n",
"\n",
"import alpaca_trade_api as tradeapi\n",
"import matplotlib.pyplot as plt\n",
"import nest_asyncio\n",
"import numpy as np\n",
"import pandas as pd\n",
"import pytz\n",
"import requests\n",
"from dateutil import parser\n",
"from IPython.display import HTML, display, Markdown\n",
"from liualgotrader.analytics import analysis\n",
"from pytz import timezone\n",
"\n",
"import sklearn\n",
"from sklearn.cluster import Birch\n",
"print(f\"SKLEARN Version {sklearn.__version__}\")\n",
"\n",
"%matplotlib inline\n",
"\n",
"\n",
"nest_asyncio.apply()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Load trading day details"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"day_to_analyze = datetime.strptime(start_date, \"%Y-%m-%d\")\n",
"end_to_analyze = datetime.strptime(edn_date, \"%Y-%m-%d\") if end_date else datetime.now()\n",
"trades = analysis.load_trades(day_to_analyze, env, end_to_analyze)\n",
"algo_runs = analysis.load_runs(day_to_analyze, env, end_to_analyze)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Show trading session performance"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"symbol_name = []\n",
"counts = []\n",
"revenues = []\n",
"est = pytz.timezone(\"US/Eastern\")\n",
"batch_ids = trades.batch_id.unique().tolist()\n",
"# batch_ids.reverse()\n",
"\n",
"current_max = pd.options.display.max_rows\n",
"pd.set_option(\"display.max_rows\", None)\n",
"for batch_id in batch_ids:\n",
" how_was_my_day = pd.DataFrame()\n",
" how_was_my_day[\"symbol\"] = trades.loc[trades[\"batch_id\"] == batch_id][\n",
" \"symbol\"\n",
" ].unique()\n",
" how_was_my_day[\"revenues\"] = how_was_my_day[\"symbol\"].apply(\n",
" lambda x: analysis.calc_batch_revenue(x, trades, batch_id)\n",
" )\n",
" how_was_my_day[\"count\"] = how_was_my_day[\"symbol\"].apply(\n",
" lambda x: analysis.count_trades(x, trades, batch_id)\n",
" )\n",
"\n",
" if len(algo_runs.loc[algo_runs[\"batch_id\"] == batch_id].start_time) > 0:\n",
" batch_time = (\n",
" algo_runs.loc[algo_runs[\"batch_id\"] == batch_id]\n",
" .start_time.min()\n",
" .tz_localize(\"utc\")\n",
" .astimezone(est)\n",
" )\n",
" else:\n",
" continue\n",
"\n",
" env = algo_runs[algo_runs[\"batch_id\"] == batch_id].algo_env.tolist()[0]\n",
" win_ratio = round(\n",
" 1.0\n",
" * len(how_was_my_day[how_was_my_day.revenues >= 0])\n",
" / len(how_was_my_day[how_was_my_day.revenues < 0]),\n",
" 2,\n",
" )\n",
" revenues = round(sum(how_was_my_day[\"revenues\"]), 2)\n",
" traded = len(how_was_my_day)\n",
" print(\n",
" f\"[{env}] {batch_id}\\n{batch_time}\\nTotal revenues=${revenues}\\nTotal traded:{traded} Win/Lose ration {win_ratio}\"\n",
" )\n",
"\n",
" display(\n",
" Markdown(f\"{len(how_was_my_day[how_was_my_day.revenues >= 0])} **Winners**\")\n",
" )\n",
" display(how_was_my_day[how_was_my_day.revenues >= 0].sort_values('revenues'))\n",
" display(Markdown(f\"{len(how_was_my_day[how_was_my_day.revenues < 0])} **Lossers**\"))\n",
" display(how_was_my_day[how_was_my_day.revenues < 0].sort_values('revenues'))\n",
"\n",
"pd.set_option(\"display.max_rows\", current_max)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Time analysis"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"time_model = pd.DataFrame()\n",
"\n",
"time_model[\"symbol\"] = trades.symbol.unique()\n",
"time_model[\"time\"] = time_model[\"symbol\"].apply(\n",
" lambda x: (\n",
" pytz.utc.localize(\n",
" pd.to_datetime(trades.loc[trades.symbol == x].tstamp.min())\n",
" ).astimezone(est)\n",
" - day_to_analyze.replace(hour=9, minute=30, tzinfo=est)\n",
" ).total_seconds()\n",
")\n",
"time_model[\"revenue\"] = time_model[\"symbol\"].apply(\n",
" lambda x: sum(\n",
" [analysis.calc_batch_revenue(x, trades, batch_id) for batch_id in batch_ids]\n",
" )\n",
")\n",
"time_model[\"algo\"] = time_model[\"symbol\"].apply(\n",
" lambda x: trades.loc[trades.symbol == x].algo_name.unique().tolist()\n",
")\n",
"time_model = time_model.sort_values(\"time\")\n",
"time_model[\"total\"] = time_model.revenue.cumsum()\n",
"time_model[\"reasons\"] = time_model[\"symbol\"].apply(\n",
" lambda x: [\n",
" json.loads(i)[\"sell\"]['reasons']\n",
" for i in trades.loc[trades.symbol == x].indicators.tolist()\n",
" if json.loads(i)[\"sell\"] and 'reasons' in json.loads(i)[\"sell\"]\n",
" ]\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.plot(time_model.time.tolist(), time_model.total.tolist())\n",
"plt.title(\"Accumulative Revenues\")\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"plt.scatter(\n",
" time_model.time,\n",
" time_model.revenue,\n",
")\n",
"plt.title(\"Trade profit by time\")\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"c1 = []\n",
"c2 = []\n",
"c3 = []\n",
"c4 = []\n",
"c5 = []\n",
"symbols = trades[\"symbol\"].unique()\n",
"for symbol in symbols:\n",
" for algo in trades.loc[trades.symbol == symbol].algo_name.unique():\n",
" position = 0\n",
" for i, row in (\n",
" trades.loc[(trades.symbol == symbol) & (trades.algo_name == algo)]\n",
" .sort_values(\"tstamp\")\n",
" .iterrows()\n",
" ):\n",
" if not position:\n",
" c1.append(symbol)\n",
" c2.append(algo)\n",
" c3.append(row[\"tstamp\"])\n",
" c4.append(0.0)\n",
" c5.append(0.0)\n",
" delta = 0.0\n",
" position += row[\"qty\"] * (\n",
" (1 if row[\"operation\"] == \"sell\" and row[\"qty\"] > 0 else -1)\n",
" )\n",
" delta += (\n",
" float(row[\"price\"])\n",
" * row[\"qty\"]\n",
" * (1 if row[\"operation\"] == \"sell\" and row[\"qty\"] > 0 else -1)\n",
" )\n",
" c4[-1] = delta\n",
" c5[-1] = round((row[\"tstamp\"] - c3[-1]).total_seconds(), 3)\n",
"\n",
"trades_data = pd.DataFrame(\n",
" data={\"symbol\": c1, \"algo\": c2, \"when\": c3, \"revenue\": c4, \"duration\": c5}\n",
").sort_values(\"when\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"for algo in trades_data.algo.unique():\n",
" plt.plot(\n",
" trades_data.loc[trades_data.algo == algo].when.tolist(),\n",
" trades_data.loc[trades_data.algo == algo].revenue.cumsum().tolist(),\n",
" label=algo,\n",
" )\n",
"plt.title(\"Accumulative Revenue by Algo\")\n",
"plt.legend()\n",
"plt.show()\n",
"\n",
"for algo in trades_data.algo.unique():\n",
" plt.scatter(\n",
" trades_data.loc[trades_data.algo == algo].when.tolist(),\n",
" trades_data.loc[trades_data.algo == algo].revenue.tolist(),\n",
" label=algo,\n",
" )\n",
"plt.title(\"Trades Profit by Algo over time\")\n",
"plt.legend()\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Revenue Distribution"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"for algo in trades_data.algo.unique():\n",
" trades_data.loc[trades_data.algo == algo].revenue.hist(bins=10, alpha=0.5, label=algo)\n",
"plt.legend()\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.0"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
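The branch name references anchored VWAP, though the implementation file itself is not among the hunks shown here. For orientation, below is a minimal sketch of computing a VWAP anchored at a chosen timestamp with pandas; the function name and the `bars` layout (timestamp index with `close` and `volume` columns) are assumptions for illustration, not the repository's API.

import pandas as pd

def anchored_vwap(bars: pd.DataFrame, anchor: pd.Timestamp) -> pd.Series:
    # running sum of price * volume divided by running sum of volume,
    # starting at the anchor bar (hypothetical column names)
    window = bars.loc[anchor:]
    pv = (window["close"] * window["volume"]).cumsum()
    return pv / window["volume"].cumsum()

# hypothetical usage:
# vwap = anchored_vwap(minute_bars, pd.Timestamp("2020-11-02 09:30", tz="US/Eastern"))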
File renamed without changes.
File renamed without changes.
@@ -13,7 +13,7 @@
"metadata": {},
"outputs": [],
"source": [
"day_to_analyze = \"2020-11-18\"\n",
"day_to_analyze = \"2020-11-12\"\n",
"env = \"PAPER\" # values may be PAPER / PROD / BACKTEST"
]
},
@@ -48,8 +48,8 @@
"from liualgotrader.analytics import analysis\n",
"from pytz import timezone\n",
"\n",
"%matplotlib inline\n",
"\n",
"%matplotlib inline\n",
"\n",
"nest_asyncio.apply()"
]
File renamed without changes.
