Merge pull request #94 from gcarq/autopep

autoformat with autopep8
This commit is contained in:
Samuel Husso 2017-11-06 19:41:57 +02:00 committed by GitHub
commit 3d42b9fd75
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 46 additions and 37 deletions

View File

@@ -23,7 +23,7 @@ def parse_ticker_dataframe(ticker: list) -> DataFrame:
     """
     df = DataFrame(ticker) \
         .drop('BV', 1) \
-        .rename(columns={'C':'close', 'V':'volume', 'O':'open', 'H':'high', 'L':'low', 'T':'date'})
+        .rename(columns={'C': 'close', 'V': 'volume', 'O': 'open', 'H': 'high', 'L': 'low', 'T': 'date'})
     df['date'] = to_datetime(df['date'], utc=True, infer_datetime_format=True)
     df.sort_values('date', inplace=True)
     return df
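
For reference, a minimal sketch of the kind of input the reformatted parse_ticker_dataframe expects and what it returns. The sample values below are assumptions, not part of this commit; only the keys 'O', 'H', 'L', 'C', 'V', 'BV', 'T' and the transformations are taken from the diff.

from pandas import DataFrame, to_datetime

# hypothetical Bittrex-style candle; only the renamed/dropped keys are certain
ticker = [
    {'O': 0.0710, 'H': 0.0750, 'L': 0.0705, 'C': 0.0742,
     'V': 120.5, 'BV': 8.9, 'T': '2017-11-06T19:00:00'},
]

df = (DataFrame(ticker)
      .drop('BV', axis=1)  # the diff uses the older positional form .drop('BV', 1)
      .rename(columns={'C': 'close', 'V': 'volume', 'O': 'open',
                       'H': 'high', 'L': 'low', 'T': 'date'}))
df['date'] = to_datetime(df['date'], utc=True, infer_datetime_format=True)
df.sort_values('date', inplace=True)
# df now holds open/high/low/close/volume plus a UTC datetime 'date' column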

View File

@@ -208,7 +208,7 @@ def create_trade(stake_amount: float) -> Optional[Trade]:
     return Trade(pair=pair,
                  stake_amount=stake_amount,
                  amount=amount,
-                 fee=fee*2,
+                 fee=fee * 2,
                  open_rate=buy_limit,
                  open_date=datetime.utcnow(),
                  exchange=exchange.get_name().upper(),

View File

@@ -18,11 +18,7 @@ logging.disable(logging.DEBUG)  # disable debug logs that slow backtesting a lot

 def format_results(results):
     return 'Made {} buys. Average profit {:.2f}%. Total profit was {:.3f}. Average duration {:.1f} mins.'.format(
-        len(results.index),
-        results.profit.mean() * 100.0,
-        results.profit.sum(),
-        results.duration.mean() * 5
-    )
+        len(results.index), results.profit.mean() * 100.0, results.profit.sum(), results.duration.mean() * 5)


 def print_pair_results(pair, results):
@@ -56,7 +52,7 @@ def backtest(conf, pairs, mocker):
     mocker.patch.dict('freqtrade.main._CONF', conf)
     mocker.patch('arrow.utcnow', return_value=arrow.get('2017-08-20T14:50:00'))
     for pair in pairs:
-        with open('freqtrade/tests/testdata/'+pair+'.json') as data_file:
+        with open('freqtrade/tests/testdata/' + pair + '.json') as data_file:
             mocked_history.return_value = json.load(data_file)
         ticker = analyze_ticker(pair)[['close', 'date', 'buy']].copy()
         # for each buy point
@@ -65,7 +61,7 @@ def backtest(conf, pairs, mocker):
                 open_rate=row.close,
                 open_date=row.date,
                 amount=1,
-                fee=exchange.get_fee()*2
+                fee=exchange.get_fee() * 2
             )
             # calculate win/lose forwards from buy point
             for row2 in ticker[row.Index:].itertuples(index=True):
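
As an aside, both Trade constructions touched by this PR pass the per-side fee doubled (fee * 2, get_fee() * 2), presumably so a single number covers the entry and the exit of the simulated position. A rough sketch of the resulting round-trip arithmetic, with illustrative values that are not taken from the repository:

fee_per_side = 0.0025            # hypothetical 0.25% commission per executed order
fee = fee_per_side * 2           # entry + exit, mirroring get_fee() * 2 above

open_rate, close_rate = 0.0500, 0.0515
profit = (close_rate - open_rate) / open_rate - fee
print('{:.2f}%'.format(profit * 100))   # 3.00% price move minus 0.50% fees -> 2.50%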

View File

@@ -98,7 +98,11 @@ def test_status_table_handle(conf, update, mocker):
     mocker.patch.dict('freqtrade.main._CONF', conf)
     mocker.patch('freqtrade.main.get_buy_signal', side_effect=lambda _: True)
     msg_mock = MagicMock()
-    mocker.patch.multiple('freqtrade.main.telegram', _CONF=conf, init=MagicMock(), send_msg=msg_mock)
+    mocker.patch.multiple(
+        'freqtrade.main.telegram',
+        _CONF=conf,
+        init=MagicMock(),
+        send_msg=msg_mock)
     mocker.patch.multiple('freqtrade.main.exchange',
                           validate_pairs=MagicMock(),
                           get_ticker=MagicMock(return_value={
@@ -269,12 +273,15 @@ def test_performance_handle(conf, update, mocker):
     assert '<code>BTC_ETH\t10.05%</code>' in msg_mock.call_args_list[-1][0][0]


 def test_count_handle(conf, update, mocker):
     mocker.patch.dict('freqtrade.main._CONF', conf)
     mocker.patch('freqtrade.main.get_buy_signal', side_effect=lambda _: True)
     msg_mock = MagicMock()
-    mocker.patch.multiple('freqtrade.main.telegram', _CONF=conf, init=MagicMock(), send_msg=msg_mock)
+    mocker.patch.multiple(
+        'freqtrade.main.telegram',
+        _CONF=conf,
+        init=MagicMock(),
+        send_msg=msg_mock)
     mocker.patch.multiple('freqtrade.main.exchange',
                           validate_pairs=MagicMock(),
                           get_ticker=MagicMock(return_value={

View File

@@ -91,7 +91,7 @@ def session(df, start='17:00', end='16:00'):
     curr = prev = df[-1:].index[0].strftime('%Y-%m-%d')

     # globex/forex session
-    if is_same_day == False:
+    if not is_same_day:
         prev = (datetime.strptime(curr, '%Y-%m-%d') -
                 timedelta(1)).strftime('%Y-%m-%d')
@@ -117,13 +117,19 @@ def heikinashi(bars):
     bars['ha_high'] = bars.loc[:, ['high', 'ha_open', 'ha_close']].max(axis=1)
     bars['ha_low'] = bars.loc[:, ['low', 'ha_open', 'ha_close']].min(axis=1)

-    return pd.DataFrame(index=bars.index, data={'open': bars['ha_open'],
-                        'high': bars['ha_high'], 'low': bars['ha_low'], 'close': bars['ha_close']})
+    return pd.DataFrame(
+        index=bars.index,
+        data={
+            'open': bars['ha_open'],
+            'high': bars['ha_high'],
+            'low': bars['ha_low'],
+            'close': bars['ha_close']})


 # ---------------------------------------------

-def tdi(series, rsi_len=13, bollinger_len=34, rsi_smoothing=2, rsi_signal_len=7, bollinger_std=1.6185):
+def tdi(series, rsi_len=13, bollinger_len=34, rsi_smoothing=2,
+        rsi_signal_len=7, bollinger_std=1.6185):
     rsi_series = rsi(series, rsi_len)
     bb_series = bollinger_bands(rsi_series, bollinger_len, bollinger_std)
     signal = sma(rsi_series, rsi_signal_len)
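
For context on the heikinashi hunk above, the ha_* columns follow the textbook Heikin-Ashi definitions. Below is a self-contained sketch of those formulas, not the full qtpylib implementation; in particular the seeding of the first ha_open value may differ from qtpylib's.

import pandas as pd

def heikinashi_sketch(bars: pd.DataFrame) -> pd.DataFrame:
    # ha_close is the average of the regular OHLC values
    ha_close = (bars['open'] + bars['high'] + bars['low'] + bars['close']) / 4
    # ha_open is the midpoint of the previous Heikin-Ashi candle
    ha_open = ha_close.copy()
    for i in range(1, len(bars)):
        ha_open.iloc[i] = (ha_open.iloc[i - 1] + ha_close.iloc[i - 1]) / 2
    # ha_high / ha_low bracket the raw extreme and both Heikin-Ashi body ends,
    # matching the .max(axis=1) / .min(axis=1) lines in the diff
    ha_high = pd.concat([bars['high'], ha_open, ha_close], axis=1).max(axis=1)
    ha_low = pd.concat([bars['low'], ha_open, ha_close], axis=1).min(axis=1)
    return pd.DataFrame(index=bars.index,
                        data={'open': ha_open, 'high': ha_high,
                              'low': ha_low, 'close': ha_close})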
@@ -248,9 +254,9 @@ def rolling_std(series, window=200, min_periods=None):
     else:
         try:
             return series.rolling(window=window, min_periods=min_periods).std()
-        except:
+        except BaseException:
             return pd.Series(series).rolling(window=window, min_periods=min_periods).std()
-    except:
+    except BaseException:
         return pd.rolling_std(series, window=window, min_periods=min_periods)
@@ -264,9 +270,9 @@ def rolling_mean(series, window=200, min_periods=None):
     else:
         try:
             return series.rolling(window=window, min_periods=min_periods).mean()
-        except:
+        except BaseException:
             return pd.Series(series).rolling(window=window, min_periods=min_periods).mean()
-    except:
+    except BaseException:
         return pd.rolling_mean(series, window=window, min_periods=min_periods)
@@ -277,9 +283,9 @@ def rolling_min(series, window=14, min_periods=None):
     try:
         try:
             return series.rolling(window=window, min_periods=min_periods).min()
-        except:
+        except BaseException:
             return pd.Series(series).rolling(window=window, min_periods=min_periods).min()
-    except:
+    except BaseException:
         return pd.rolling_min(series, window=window, min_periods=min_periods)
@@ -290,9 +296,9 @@ def rolling_max(series, window=14, min_periods=None):
     try:
         try:
             return series.rolling(window=window, min_periods=min_periods).min()
-        except:
+        except BaseException:
             return pd.Series(series).rolling(window=window, min_periods=min_periods).min()
-    except:
+    except BaseException:
         return pd.rolling_min(series, window=window, min_periods=min_periods)
@@ -302,7 +308,7 @@ def rolling_weighted_mean(series, window=200, min_periods=None):
     min_periods = window if min_periods is None else min_periods
     try:
         return series.ewm(span=window, min_periods=min_periods).mean()
-    except:
+    except BaseException:
         return pd.ewma(series, span=window, min_periods=min_periods)
@@ -457,7 +463,7 @@ def returns(series):
     try:
         res = (series / series.shift(1) -
                1).replace([np.inf, -np.inf], float('NaN'))
-    except:
+    except BaseException:
         res = nans(len(series))

     return pd.Series(index=series.index, data=res)
@@ -469,7 +475,7 @@ def log_returns(series):
     try:
         res = np.log(series / series.shift(1)
                      ).replace([np.inf, -np.inf], float('NaN'))
-    except:
+    except BaseException:
         res = nans(len(series))

     return pd.Series(index=series.index, data=res)
@@ -482,7 +488,7 @@ def implied_volatility(series, window=252):
         logret = np.log(series / series.shift(1)
                         ).replace([np.inf, -np.inf], float('NaN'))
         res = numpy_rolling_std(logret, window) * np.sqrt(window)
-    except:
+    except BaseException:
         res = nans(len(series))

     return pd.Series(index=series.index, data=res)
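
Finally, a note on the recurring exception change in this file: autopep8's fix for pycodestyle E722 rewrites every bare "except:" as "except BaseException:", which keeps the old catch-everything behaviour (it still swallows KeyboardInterrupt and SystemExit). Below is a hedged sketch of a narrower variant these pandas-compatibility helpers could use instead; it is an alternative under stated assumptions, not what this commit does.

import pandas as pd

def rolling_std_sketch(series, window=200, min_periods=None):
    min_periods = window if min_periods is None else min_periods
    try:
        # works when 'series' is already a pandas Series/DataFrame
        return series.rolling(window=window, min_periods=min_periods).std()
    except AttributeError:
        # plain list/ndarray input has no .rolling: wrap it first
        return pd.Series(series).rolling(window=window,
                                         min_periods=min_periods).std()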