
Commit 0359927

committed
Fix flake8 warnings
1 parent a15ac8f commit 0359927

File tree

11 files changed: +113 −107 lines changed


investing_algorithm_framework/app/algorithm.py

Lines changed: 2 additions & 1 deletion
@@ -24,7 +24,8 @@ class is responsible for managing the strategies and executing
 
     :param (optional) name: The name of the algorithm
     :param (optional) description: The description of the algorithm
-    :param (optional) context: The context of the algorithm, for backtest references
+    :param (optional) context: The context of the algorithm,
+        for backtest references
     :param (optional) strategy: A single strategy to add to the algorithm
     :param (optional) data_sources: The list of data sources to
         add to the algorithm

investing_algorithm_framework/app/app.py

Lines changed: 2 additions & 1 deletion
@@ -801,7 +801,8 @@ def run_backtests(
 
            print(
                f"{COLOR_YELLOW}Running backtests for date "
-                f"range:{COLOR_RESET} {COLOR_GREEN}{date_range.name} {date_range.start_date} - "
+                f"range:{COLOR_RESET} {COLOR_GREEN}{date_range.name} "
+                f"{date_range.start_date} - "
                f"{date_range.end_date} for a "
                f"total of {len(algorithms)} algorithms.{COLOR_RESET}"
            )
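
For reference, a minimal sketch of the wrapping technique used in this hunk (not part of the commit): adjacent string literals inside parentheses are concatenated by the parser, so one long f-string can be split over several short lines without any "+" operators. The colour constants and values below are placeholders.

    COLOR_YELLOW = "\033[93m"   # placeholder ANSI codes, assumed values
    COLOR_GREEN = "\033[92m"
    COLOR_RESET = "\033[0m"

    name, start, end, count = "my-algo", "2021-01-01", "2021-06-01", 3
    message = (
        f"{COLOR_YELLOW}Running backtests for date "
        f"range:{COLOR_RESET} {COLOR_GREEN}{name} "
        f"{start} - "
        f"{end} for a "
        f"total of {count} algorithms.{COLOR_RESET}"
    )
    print(message)  # prints as a single line despite the five literals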

investing_algorithm_framework/domain/graphs.py

Lines changed: 59 additions & 56 deletions
@@ -13,7 +13,6 @@ def create_rsi_graph(data: pd.DataFrame):
     if not isinstance(data.index, pd.DatetimeIndex):
         raise ValueError("The index of the data should be of type datetime")
 
-
     # Check if the 'RSI' column exists
     if 'RSI' not in data.columns:
         raise ValueError("The data should have a 'RSI' column")
@@ -64,6 +63,7 @@ def create_prices_graph(
         name=graph_name
     )
 
+
 def create_adx_graph(data: pd.DataFrame):
     """
     Create a graph for the ADX metric.
@@ -87,6 +87,7 @@ def create_adx_graph(data: pd.DataFrame):
         name="ADX"
     )
 
+
 def create_di_plus_graph(data: pd.DataFrame):
     """
     Create a graph for the DI+ metric.
@@ -110,6 +111,7 @@ def create_di_plus_graph(data: pd.DataFrame):
         name="+DI"
     )
 
+
 def create_di_minus_graph(data: pd.DataFrame):
     """
     Create a graph for the DI- metric.
@@ -133,6 +135,7 @@ def create_di_minus_graph(data: pd.DataFrame):
         name="-DI"
     )
 
+
 def create_di_plus_di_minus_crossover_graph(data: pd.DataFrame):
     """
     Create a graph for the DI- and DI+ crossover.
@@ -147,7 +150,8 @@ def create_di_plus_di_minus_crossover_graph(data: pd.DataFrame):
         raise ValueError("The data should have a '-DI' and '+DI' column")
 
     # Get all crossover indexes
-    crossover_index = data[(data['+DI'] < data['-DI']) & (data['+DI'].shift(1) > data['-DI'].shift(1))].index
+    crossover_index = data[(data['+DI'] < data['-DI']) &
+                           (data['+DI'].shift(1) > data['-DI'].shift(1))].index
 
     # Use .loc to get the corresponding 'Close' values
     crossover_close_values = data.loc[crossover_index, '+DI']
@@ -170,7 +174,6 @@ def create_ema_graph(data: pd.DataFrame, key, color="blue"):
     if key not in data.columns:
         raise ValueError(f"The data should have a {key} column")
 
-
     return go.Scatter(
         x=data.index,
         y=data[key],
@@ -179,17 +182,22 @@ def create_ema_graph(data: pd.DataFrame, key, color="blue"):
         name=key
     )
 
+
 def create_crossover_graph(data: pd.DataFrame, key_one, key_two, color="blue"):
     # Check if the index is of type datetime
     if not isinstance(data.index, pd.DatetimeIndex):
         raise ValueError("The index of the data should be of type datetime")
 
     # Check if the key columns exist
     if key_one not in data.columns or key_two not in data.columns:
-        raise ValueError(f"The data should have a {key_one} and {key_two} column")
+        raise ValueError(f"The data should have a {key_one} "
+                         f"and {key_two} column")
 
     # Get all crossover indexes
-    crossover_index = data[(data[key_one] <= data[key_two]) & (data[key_one].shift(1) >= data[key_two].shift(1))].index
+    crossover_index = data[
+        (data[key_one] <= data[key_two]) &
+        (data[key_one].shift(1) >= data[key_two].shift(1))
+    ].index
 
     # Use .loc to get the corresponding 'Close' values
     crossover_close_values = data.loc[crossover_index, key_one]
@@ -202,7 +210,8 @@ def create_crossover_graph(data: pd.DataFrame, key_one, key_two, color="blue"):
         name=f'{key_one} {key_two} Crossover'
     )
 
-def create_peaks_chart(data: pd.DataFrame, key="Close", order = 5):
+
+def create_peaks_chart(data: pd.DataFrame, key="Close", order=5):
 
     # Check if the index is of type datetime
     if not isinstance(data.index, pd.DatetimeIndex):
@@ -220,17 +229,8 @@ def create_peaks_chart(data: pd.DataFrame, key="Close", order = 5):
     ll_close_index = data[data[f'{key}_lows'] == 1].index
     hl_close_index = data[data[f'{key}_lows'] == -1].index
 
-    # Subtract for each index 10 hours
-    # hh_close_index = hh_close_index - pd.Timedelta(hours=2 * order)
-    # lh_close_index = lh_close_index - pd.Timedelta(hours=2 * order)
-    # ll_close_index = ll_close_index - pd.Timedelta(hours=2 * order)
-    # hl_close_index = hl_close_index - pd.Timedelta(hours=2 * order)
-    # hh_close_index = hh_close_index
-    # lh_close_index = lh_close_index
-    # ll_close_index = ll_close_index
-    # hl_close_index = hl_close_index
-
-    # Use .loc to get the corresponding 'Close' values if the index is in the DataFrame
+    # Use .loc to get the corresponding 'Close' values if
+    # the index is in the DataFrame
     hh_close_values = data.loc[hh_close_index, key]
     lh_close_values = data.loc[lh_close_index, key]
     ll_close_values = data.loc[ll_close_index, key]
@@ -276,13 +276,18 @@ def create_peaks_chart(data: pd.DataFrame, key="Close", order = 5):
     return higher_high_graph, lower_high_graph, lower_lows_graph, higher_lows
 
 
-def create_bullish_divergence_chart(data: pd.DataFrame, key_one, key_two, color = 'red'):
+def create_bullish_divergence_chart(
+    data: pd.DataFrame, key_one, key_two, color='red'
+):
     """
-    A bullish divergence occurs when the "<key_one>_lows" makes a new low but the "<key_two>_lows" makes a higher low.
+    A bullish divergence occurs when the "<key_one>_lows" makes
+    a new low but the "<key_two>_lows" makes a higher low.
 
-    For example, if the RSI makes a new low but the close price makes a higher low, then we have a bullish divergence.
+    For example, if the RSI makes a new low but the close price
+    makes a higher low, then we have a bullish divergence.
     """
-    divergence_index = data[(data[f'{key_one}_lows'] == -1) & (data[f'{key_two}_lows'] == 1)].index
+    divergence_index = data[(data[f'{key_one}_lows'] == -1)
+                            & (data[f'{key_two}_lows'] == 1)].index
     divergence_close_values = data.loc[divergence_index, 'Close']
 
     return go.Scatter(
@@ -294,15 +299,20 @@ def create_bullish_divergence_chart(data: pd.DataFrame, key_one, key_two, color
     )
 
 
-def create_bearish_divergence_chart(data: pd.DataFrame, key_one, key_two, color = 'red'):
+def create_bearish_divergence_chart(
+    data: pd.DataFrame, key_one, key_two, color='red'
+):
     """
-    A bearish divergence occurs when the "<key_one>_highs" makes a new high but the "<key_two>_highs" makes a lower high.
+    A bearish divergence occurs when the "<key_one>_highs" makes a
+    new high but the "<key_two>_highs" makes a lower high.
 
-    For example, if the RSI makes a new high but the close price makes a lower high, then we have a bearish divergence.
+    For example, if the RSI makes a new high but the close price makes
+    a lower high, then we have a bearish divergence.
     """
 
     # Add divergence charts
-    divergence_index = data[(data[f'{key_one}_highs'] == -1) & (data[f'{key_two}_highs'] == 1)].index
+    divergence_index = data[(data[f'{key_one}_highs'] == -1)
+                            & (data[f'{key_two}_highs'] == 1)].index
     divergence_close_values = data.loc[divergence_index, 'Close']
 
     return go.Scatter(
@@ -315,12 +325,14 @@ def create_bearish_divergence_chart(data: pd.DataFrame, key_one, key_two, color
 
 
 def create_entry_graph(data: pd.DataFrame):
-
-
-    # Iterate over each row in the DataFrame and check if there is a bullish divergence between the RSI and the close price
-    # and if there is a crossover between the DI+ and DI- for the last 12 hours (6 candles)
+    # Iterate over each row in the DataFrame and check if there is a
+    # bullish divergence between the RSI and the close price
+    # and if there is a crossover between the DI+ and DI- for
+    # the last 12 hours (6 candles)
     # Get all crossover indexes
-    crossover_index = data[(data['+DI'] <= data['-DI']) & (data['+DI'].shift(1) >= data['-DI'].shift(1))].index
+    crossover_index = data[(data['+DI'] <= data['-DI']) &
+                           (data['+DI'].shift(1) >= data['-DI'].shift(1))]\
+        .index
     data['di_crossover'] = 0
     data.loc[crossover_index, 'di_crossover'] = 1
 
@@ -330,11 +342,19 @@ def create_entry_graph(data: pd.DataFrame):
 
         if row.di_crossover == 1:
             match = False
-            # Check if there was a bullish divergence between the RSI and the close price in the last 2 days
-            rsi_window = data.loc[row.Index - pd.Timedelta(days=2):row.Index, 'RSI_lows']
-            close_window = data.loc[row.Index - pd.Timedelta(days=2):row.Index, 'Close_lows']
-
-            # Go over each row and check if there is a bullish divergence between the RSI and the close price
+            # Check if there was a bullish divergence between
+            # the RSI and the close price in the last 2 days
+            rsi_window = data.loc[
+                row.Index - pd.Timedelta(days=2):row.Index,
+                'RSI_lows'
+            ]
+            close_window = data.loc[
+                row.Index - pd.Timedelta(days=2):row.Index,
+                'Close_lows'
+            ]
+
+            # Go over each row and check if there is a bullish
+            # divergence between the RSI and the close price
             for rsi_row, close_row in zip(rsi_window, close_window):
 
                 if rsi_row == -1 and close_row == 1:
@@ -344,36 +364,19 @@ def create_entry_graph(data: pd.DataFrame):
 
            if not match:
                # Check if the RSI had decreased
-                rsi_window = data.loc[row.Index - pd.Timedelta(days=1):row.Index, 'RSI']
+                rsi_window = data.loc[
+                    row.Index - pd.Timedelta(days=1):row.Index, 'RSI'
+                ]
                rsi_diff = rsi_window.diff().mean()
 
                if rsi_diff < -2:
                    entry_indexes.append(row.Index)
 
-    # If ema 50 <
-
-    # # Check if there is a bullish divergence between the RSI and the close price
-    # if row.Close_lows == 1 and row.RSI_lows == -1:
-    #
-    #     # Check if there is a crossover in the last 12 hours
-    #     crossovers = data.loc[row.Index - pd.Timedelta(hours=12):row.Index, 'di_crossover']
-    #
-    #     if crossovers.sum() > 0:
-    #         entry_indexes.append(row.Index)
-
-    # adx_window = data.loc[row.Index - pd.Timedelta(hours=4):row.Index, 'ADX']
-    # rsi_window = data.loc[row.Index - pd.Timedelta(hours=4):row.Index, 'RSI']
-    # adx_diff = adx_window.diff().mean()
-    # rsi_diff = rsi_window.diff().mean()
-    #
-    # if adx_diff > -2 and adx_diff < 0:
-    #     entry_indexes.append(row.Index)
-
     entry_close_values = data.loc[entry_indexes, 'Close']
     return go.Scatter(
         x=entry_indexes,
         y=entry_close_values,
         mode='markers',
         marker=dict(symbol='circle', size=10, color='green'),
         name='Entry Signal'
-        )
+    )
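
For context (not part of the commit), a minimal sketch of the pattern the divergence and entry helpers rely on: the "<key>_lows"/"<key>_highs" marker columns are assumed to be precomputed elsewhere in the framework with its -1/0/1 encoding, and a signal is simply a boolean mask over those columns. The toy values below are placeholders.

    import pandas as pd

    index = pd.date_range("2024-01-01", periods=6, freq="h")
    data = pd.DataFrame(
        {
            "Close": [100, 98, 97, 99, 101, 100],
            "RSI_lows": [0, 0, -1, 0, 0, 0],
            "Close_lows": [0, 0, 1, 0, 0, 0],
        },
        index=index,
    )

    # Same mask as create_bullish_divergence_chart with key_one="RSI",
    # key_two="Close"
    divergence_index = data[
        (data["RSI_lows"] == -1) & (data["Close_lows"] == 1)
    ].index
    divergence_close_values = data.loc[divergence_index, "Close"]

    print(divergence_close_values)  # the single 02:00 bar where both markers align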

investing_algorithm_framework/domain/metrics/price_efficiency.py

Lines changed: 0 additions & 1 deletion
@@ -55,4 +55,3 @@ def get_price_efficiency_ratio(data: DataFrame):
 
     # Calculate Efficiency Ratio
     return net_price_change / sum_absolute_changes
-
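
The return statement above computes what is often called Kaufman's Efficiency Ratio: the net price change over the window divided by the sum of the absolute bar-to-bar changes. A hedged sketch follows; the "Close" column name and the use of abs() for the net change are assumptions, since the commit only shows the final line.

    from pandas import DataFrame

    data = DataFrame({"Close": [100, 102, 101, 105, 104, 108]})

    net_price_change = abs(data["Close"].iloc[-1] - data["Close"].iloc[0])
    sum_absolute_changes = data["Close"].diff().abs().sum()

    # 8 / (2 + 1 + 4 + 1 + 4) = 8 / 12 ≈ 0.67. A value of 1.0 means the price
    # moved in a straight line; values near 0 indicate choppy price action.
    print(net_price_change / sum_absolute_changes)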

investing_algorithm_framework/domain/models/backtesting/backtest_report.py

Lines changed: 8 additions & 8 deletions
@@ -1,14 +1,15 @@
-from logging import getLogger
 from datetime import datetime
-from pandas import DataFrame, to_datetime, DatetimeIndex
+from logging import getLogger
+
+from pandas import DataFrame
 
-from investing_algorithm_framework.domain.models.base_model import BaseModel
-from investing_algorithm_framework.domain.models.time_unit import TimeUnit
-from investing_algorithm_framework.domain.models\
-    .backtesting.backtest_date_range import BacktestDateRange
 from investing_algorithm_framework.domain.constants import DATETIME_FORMAT
 from investing_algorithm_framework.domain.metrics import \
     get_price_efficiency_ratio
+from investing_algorithm_framework.domain.models \
+    .backtesting.backtest_date_range import BacktestDateRange
+from investing_algorithm_framework.domain.models.base_model import BaseModel
+from investing_algorithm_framework.domain.models.time_unit import TimeUnit
 
 logger = getLogger(__name__)
 
@@ -97,7 +98,6 @@ def __init__(
            (self.backtest_date_range.end_date
             - self.backtest_date_range.start_date).days
 
-
    @property
    def name(self):
        return self._name
@@ -551,7 +551,7 @@ def traces(self, value):
        """
        self._traces = value
 
-    def get_trace(self, symbol, strategy_id = None):
+    def get_trace(self, symbol, strategy_id=None):
        """
        Get the trace for a given symbol. If a strategy_id is provided,
        it will return the trace for that strategy.

investing_algorithm_framework/domain/models/backtesting/backtest_reports_evaluation.py

Lines changed: 7 additions & 4 deletions
@@ -179,7 +179,8 @@ def rank(
 
        for algorithm in ordered_reports:
            profit_score += sum(
-                [report.total_net_gain for report in ordered_reports[algorithm]]
+                [report.total_net_gain for
+                 report in ordered_reports[algorithm]]
            )
            growth_score += sum(
                [report.growth for report in
@@ -215,7 +216,8 @@ def get_reports(
 
        if name is not None:
            return [
-                report for report in self.backtest_reports if report.name == name
+                report for report in self.backtest_reports
+                if report.name == name
            ]
 
        if backtest_date_range is not None:
@@ -234,7 +236,8 @@ def get_report(
        reports = self.get_reports(name, backtest_date_range)
 
        if len(reports) == 0:
-            raise OperationalException("No matches for given name and date range")
+            raise OperationalException(
+                "No matches for given name and date range"
+            )
 
        return reports[0]
-
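
A hedged sketch (all names and values below are hypothetical) of the aggregation pattern in rank(): for each algorithm, the net gains of all of its backtest reports are summed into a single score that can then be used for ordering.

    from dataclasses import dataclass


    @dataclass
    class Report:
        total_net_gain: float


    ordered_reports = {
        "algo_a": [Report(10.0), Report(-2.0)],
        "algo_b": [Report(4.0), Report(5.0)],
    }

    scores = {
        algorithm: sum(report.total_net_gain for report in reports)
        for algorithm, reports in ordered_reports.items()
    }
    print(scores)  # {'algo_a': 8.0, 'algo_b': 9.0}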

investing_algorithm_framework/infrastructure/models/market_data_sources/csv.py

Lines changed: 7 additions & 5 deletions
@@ -1,25 +1,27 @@
 import logging
-from datetime import datetime, timedelta
+from datetime import datetime
 
 import polars
 from dateutil.parser import parse
 
 from investing_algorithm_framework.domain import OHLCVMarketDataSource, \
     BacktestMarketDataSource, OperationalException, TickerMarketDataSource, \
-    DATETIME_FORMAT, TimeFrame
+    DATETIME_FORMAT
 
 logger = logging.getLogger(__name__)
 
 
 class CSVOHLCVMarketDataSource(OHLCVMarketDataSource):
     """
-    Implementation of a OHLCV data source that reads OHLCV data from a csv file.
-    Market data source that reads OHLCV data from a csv file.
+    Implementation of a OHLCV data source that reads OHLCV data
+    from a csv file. Market data source that reads OHLCV data from a csv file.
     """
 
     def empty(self, start_date, end_date=None):
         if end_date is None:
-            end_date = self.create_end_date(start_date, self.timeframe, self.window_size)
+            end_date = self.create_end_date(
+                start_date, self.timeframe, self.window_size
+            )
         data = self.get_data(start_date=start_date, end_date=end_date)
         return len(data) == 0
 
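
A hedged sketch of what empty() does above: when no end_date is given, one is derived from the configured timeframe and window size, the window is loaded, and "empty" is just a length check. create_end_date's real implementation is not shown in this commit; the helper and timedelta arithmetic below are assumptions for illustration only.

    from datetime import datetime, timedelta


    def create_end_date(start_date, timeframe_minutes, window_size):
        # e.g. a 15-minute timeframe with a 200-candle window spans 50 hours
        return start_date + timedelta(minutes=timeframe_minutes * window_size)


    start = datetime(2023, 1, 1)
    end = create_end_date(start, timeframe_minutes=15, window_size=200)
    print(end)  # 2023-01-03 02:00:00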

investing_algorithm_framework/services/backtesting/backtest_report_writer_service.py

Lines changed: 1 addition & 2 deletions
@@ -1,6 +1,5 @@
-import csv
-import os
 import json
+import os
 
 from investing_algorithm_framework.domain import BacktestReport, \
     DATETIME_FORMAT_BACKTESTING
