Merge pull request #5430 from slowy07/minor-fixing

fix: typo spelling grammar
Matthias 2021-08-16 21:15:12 +02:00 committed by GitHub
commit cf80cabc84
13 changed files with 16 additions and 16 deletions


@@ -105,7 +105,7 @@ To use subaccounts with FTX, you need to edit the configuration and add the foll
 ## Kucoin
-Kucoin requries a passphrase for each api key, you will therefore need to add this key into the configuration so your exchange section looks as follows:
+Kucoin requires a passphrase for each api key, you will therefore need to add this key into the configuration so your exchange section looks as follows:
 ```json
 "exchange": {


@@ -228,7 +228,7 @@ graph = generate_candlestick_graph(pair=pair,
 # Show graph inline
 # graph.show()
-# Render graph in a seperate window
+# Render graph in a separate window
 graph.show(renderer="browser")
 ```
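The hunk above toggles between inline and browser rendering. A self-contained sketch of the same switch, with the excerpt's candlestick helper replaced by a toy Plotly figure:

```python
import plotly.graph_objects as go

graph = go.Figure(data=[go.Scatter(y=[1, 3, 2])])
# graph.show()                  # show graph inline, e.g. in a notebook
graph.show(renderer="browser")  # render graph in a separate browser window
```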


@@ -19,7 +19,7 @@ logger = logging.getLogger(__name__)
 BT_DATA_COLUMNS_OLD = ["pair", "profit_percent", "open_date", "close_date", "index",
                        "trade_duration", "open_rate", "close_rate", "open_at_end", "sell_reason"]
-# Mid-term format, crated by BacktestResult Named Tuple
+# Mid-term format, created by BacktestResult Named Tuple
 BT_DATA_COLUMNS_MID = ['pair', 'profit_percent', 'open_date', 'close_date', 'trade_duration',
                        'open_rate', 'close_rate', 'open_at_end', 'sell_reason', 'fee_open',
                        'fee_close', 'amount', 'profit_abs', 'profit_ratio']
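These column lists describe the stored backtest-result layouts. As a small illustration (the detection logic is invented for this sketch, not taken from freqtrade), such a list can be used to recognize a frame's format:

```python
import pandas as pd

BT_DATA_COLUMNS_MID = ['pair', 'profit_percent', 'open_date', 'close_date', 'trade_duration',
                       'open_rate', 'close_rate', 'open_at_end', 'sell_reason', 'fee_open',
                       'fee_close', 'amount', 'profit_abs', 'profit_ratio']

trades = pd.DataFrame(columns=BT_DATA_COLUMNS_MID)
# A frame is (at least) mid-term format if it carries all of these columns:
is_mid_format = set(BT_DATA_COLUMNS_MID).issubset(trades.columns)  # -> True
```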


@@ -242,7 +242,7 @@ def convert_trades_format(config: Dict[str, Any], convert_from: str, convert_to:
     :param config: Config dictionary
     :param convert_from: Source format
     :param convert_to: Target format
-    :param erase: Erase souce data (does not apply if source and target format are identical)
+    :param erase: Erase source data (does not apply if source and target format are identical)
     """
     from freqtrade.data.history.idatahandler import get_datahandler
     src = get_datahandler(config['datadir'], convert_from)
@@ -267,7 +267,7 @@ def convert_ohlcv_format(config: Dict[str, Any], convert_from: str, convert_to:
     :param config: Config dictionary
     :param convert_from: Source format
     :param convert_to: Target format
-    :param erase: Erase souce data (does not apply if source and target format are identical)
+    :param erase: Erase source data (does not apply if source and target format are identical)
     """
     from freqtrade.data.history.idatahandler import get_datahandler
     src = get_datahandler(config['datadir'], convert_from)


@@ -1497,7 +1497,7 @@ class Exchange:
         :returns List of trade data
         """
         if not self.exchange_has("fetchTrades"):
-            raise OperationalException("This exchange does not suport downloading Trades.")
+            raise OperationalException("This exchange does not support downloading Trades.")
         return asyncio.get_event_loop().run_until_complete(
             self._async_get_trade_history(pair=pair, since=since,
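The guard above relies on the exchange's advertised capabilities; in plain ccxt the same check looks roughly like this (a sketch, not the excerpt's exact code path):

```python
import ccxt

exchange = ccxt.binance()
# Probe the capability map before calling an optional endpoint:
if exchange.has.get("fetchTrades"):
    trades = exchange.fetch_trades("ETH/BTC", limit=10)
else:
    raise RuntimeError("This exchange does not support downloading Trades.")
```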


@@ -977,7 +977,7 @@ class FreqtradeBot(LoggingMixin):
             # if trade is partially complete, edit the stake details for the trade
             # and close the order
             # cancel_order may not contain the full order dict, so we need to fallback
-            # to the order dict aquired before cancelling.
+            # to the order dict acquired before cancelling.
             # we need to fall back to the values from order if corder does not contain these keys.
             trade.amount = filled_amount
             trade.stake_amount = trade.amount * trade.open_rate
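A runnable sketch of the fallback the comment describes: prefer the cancel response (`corder`), but fall back to the order dict acquired before cancelling. Helper name and data are invented for illustration:

```python
def safe_value_fallback(corder: dict, order: dict, key: str):
    """Return corder[key] when present and not None, else order[key]."""
    value = corder.get(key)
    return value if value is not None else order.get(key)

order = {"filled": 0.5, "amount": 1.0}   # order dict acquired before cancelling
corder = {"filled": None}                # cancel response is missing the value
filled_amount = safe_value_fallback(corder, order, "filled")  # -> 0.5
```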


@@ -538,7 +538,7 @@ def load_and_plot_trades(config: Dict[str, Any]):
     - Initializes plot-script
     - Get candle (OHLCV) data
     - Generate Dafaframes populated with indicators and signals based on configured strategy
-    - Load trades excecuted during the selected period
+    - Load trades executed during the selected period
     - Generate Plotly plot objects
     - Generate plot files
     :return: None


@@ -776,7 +776,7 @@ class RPC:
         if has_content:
             dataframe.loc[:, '__date_ts'] = dataframe.loc[:, 'date'].view(int64) // 1000 // 1000
-            # Move open to seperate column when signal for easy plotting
+            # Move open to separate column when signal for easy plotting
             if 'buy' in dataframe.columns:
                 buy_mask = (dataframe['buy'] == 1)
                 buy_signals = int(buy_mask.sum())
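The excerpt's masking step, isolated with invented data: copy `open` into a separate column only where a buy signal fired, which makes the signals easy to plot:

```python
import pandas as pd

dataframe = pd.DataFrame({"open": [1.0, 2.0, 3.0], "buy": [0, 1, 1]})
buy_mask = (dataframe['buy'] == 1)
buy_signals = int(buy_mask.sum())  # -> 2
# Move open to a separate column where the signal fired (column name invented):
dataframe.loc[buy_mask, '_buy_signal_open'] = dataframe.loc[buy_mask, 'open']
```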


@@ -119,7 +119,7 @@ def test_ohlcv_fill_up_missing_data2(caplog):
     # 3rd candle has been filled
     row = data2.loc[2, :]
     assert row['volume'] == 0
-    # close shoult match close of previous candle
+    # close should match close of previous candle
     assert row['close'] == data.loc[1, 'close']
     assert row['open'] == row['close']
     assert row['high'] == row['close']
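The assertions pin down the fill rule for synthesized candles: zero volume, close equal to the previous candle's close, and open/high/low pinned to that close. A pandas-only sketch of the rule (not freqtrade's actual fill function):

```python
import pandas as pd

df = pd.DataFrame(
    {"date": pd.date_range("2021-01-01", periods=2, freq="10min"),
     "open": [1.0, 1.2], "high": [1.1, 1.3], "low": [0.9, 1.1],
     "close": [1.05, 1.25], "volume": [10.0, 12.0]}).set_index("date")

filled = df.resample("5min").first()           # inserts one empty candle
filled["volume"] = filled["volume"].fillna(0)  # filled candle trades nothing
filled["close"] = filled["close"].ffill()      # close matches previous close
for col in ("open", "high", "low"):
    filled[col] = filled[col].fillna(filled["close"])
```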


@@ -66,7 +66,7 @@ def test_historic_ohlcv_dataformat(mocker, default_conf, ohlcv_history):
     hdf5loadmock.assert_not_called()
     jsonloadmock.assert_called_once()
-    # Swiching to dataformat hdf5
+    # Switching to dataformat hdf5
     hdf5loadmock.reset_mock()
     jsonloadmock.reset_mock()
     default_conf["dataformat_ohlcv"] = "hdf5"


@@ -200,15 +200,15 @@ def test_load_cached_data_for_updating(mocker, testdatadir) -> None:
     assert start_ts == test_data[0][0] - 1000
     # timeframe starts in the center of the cached data
-    # should return the chached data w/o the last item
+    # should return the cached data w/o the last item
     timerange = TimeRange('date', None, test_data[0][0] / 1000 + 1, 0)
     data, start_ts = _load_cached_data_for_updating('UNITTEST/BTC', '1m', timerange, data_handler)
     assert_frame_equal(data, test_data_df.iloc[:-1])
     assert test_data[-2][0] <= start_ts < test_data[-1][0]
-    # timeframe starts after the chached data
-    # should return the chached data w/o the last item
+    # timeframe starts after the cached data
+    # should return the cached data w/o the last item
     timerange = TimeRange('date', None, test_data[-1][0] / 1000 + 100, 0)
     data, start_ts = _load_cached_data_for_updating('UNITTEST/BTC', '1m', timerange, data_handler)
     assert_frame_equal(data, test_data_df.iloc[:-1])
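In miniature, the behaviour the test pins down: when refreshing cached candles, drop the last cached row, since it may be an incomplete candle, and resume downloading from just before it. A pure-Python sketch with invented timestamps:

```python
cached = [(1000, 1.0), (2000, 1.1), (3000, 1.2)]  # (timestamp_ms, close)

data = cached[:-1]        # the cached data w/o the last item
start_ts = cached[-2][0]  # resume so the last candle gets re-downloaded
assert cached[-2][0] <= start_ts < cached[-1][0]
```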


@@ -2182,7 +2182,7 @@ def test_get_historic_trades_notsupported(default_conf, mocker, caplog, exchange
     pair = 'ETH/BTC'
     with pytest.raises(OperationalException,
-                       match="This exchange does not suport downloading Trades."):
+                       match="This exchange does not support downloading Trades."):
         exchange.get_historic_trades(pair, since=trades_history[0][0],
                                      until=trades_history[-1][0])


@@ -630,7 +630,7 @@ def test_strategy_safe_wrapper_error(caplog, error):
     assert ret
     caplog.clear()
-    # Test supressing error
+    # Test suppressing error
     ret = strategy_safe_wrapper(failing_method, message='DeadBeef', supress_error=True)()
     assert log_has_re(r'DeadBeef.*', caplog)
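What the test exercises, as a simplified stand-in (not freqtrade's actual strategy_safe_wrapper; `supress_error` keeps the codebase's historical spelling of the parameter):

```python
import logging

logger = logging.getLogger(__name__)

def strategy_safe_wrapper(f, message: str = '', supress_error: bool = False):
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception as error:
            # Log with the caller-supplied prefix, optionally swallowing the error.
            logger.warning('%s Strategy caused the following exception: %s', message, error)
            if not supress_error:
                raise
    return wrapper

def failing_method():
    raise ValueError('DeadBeef error')

# With supress_error=True the exception is logged instead of raised:
strategy_safe_wrapper(failing_method, message='DeadBeef', supress_error=True)()
```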