diff --git a/docs/JOSS_paper/note_to_editors.txt b/docs/JOSS_paper/note_to_editors.txt new file mode 100644 index 000000000..b0ec68e72 --- /dev/null +++ b/docs/JOSS_paper/note_to_editors.txt @@ -0,0 +1,15 @@ +Dear Editors, +We present a paper for ``FreqAI`` a machine learning sandbox for researchers and citizen scientists alike. +There are a large number of authors, however all have contributed in a significant way to this paper. +For clarity the contribution of each author is outlined: + + - Robert Caulk : Conception and software development + - Elin Tornquist : Theoretical brainstorming, data analysis, tool dev + - Matthias Voppichler : Software architecture and code review + - Andrew R. Lawless : Extensive testing, feature brainstorming + - Ryan McMullan : Extensive testing, feature brainstorming + - Wagner Costa Santos : Major backtesting developments, extensive testing + - Pascal Schmidt : Extensive testing, feature brainstorming + - Timothy C. Pogue : Webhooks forecast sharing + - Stefan P. Gehring : Extensive testing, feature brainstorming + - Johan van der Vlugt : Extensive testing, feature brainstorming diff --git a/docs/JOSS_paper/paper.bib b/docs/JOSS_paper/paper.bib new file mode 100644 index 000000000..dc7186a56 --- /dev/null +++ b/docs/JOSS_paper/paper.bib @@ -0,0 +1,206 @@ +@article{scikit-learn, + title={Scikit-learn: Machine Learning in {P}ython}, + author={Pedregosa, F. and Varoquaux, G. and Gramfort, A. and Michel, V. + and Thirion, B. and Grisel, O. and Blondel, M. and Prettenhofer, P. + and Weiss, R. and Dubourg, V. and Vanderplas, J. and Passos, A. and + Cournapeau, D. and Brucher, M. and Perrot, M. and Duchesnay, E.}, + journal={Journal of Machine Learning Research}, + volume={12}, + pages={2825--2830}, + year={2011} +} + +@inproceedings{catboost, +author = {Prokhorenkova, Liudmila and Gusev, Gleb and Vorobev, Aleksandr and Dorogush, Anna Veronika and Gulin, Andrey}, +title = {CatBoost: Unbiased Boosting with Categorical Features}, +year = {2018}, +publisher = {Curran Associates Inc.}, +address = {Red Hook, NY, USA}, +abstract = {This paper presents the key algorithmic techniques behind CatBoost, a new gradient boosting toolkit. Their combination leads to CatBoost outperforming other publicly available boosting implementations in terms of quality on a variety of datasets. Two critical algorithmic advances introduced in CatBoost are the implementation of ordered boosting, a permutation-driven alternative to the classic algorithm, and an innovative algorithm for processing categorical features. Both techniques were created to fight a prediction shift caused by a special kind of target leakage present in all currently existing implementations of gradient boosting algorithms. 
In this paper, we provide a detailed analysis of this problem and demonstrate that proposed algorithms solve it effectively, leading to excellent empirical results.}, +booktitle = {Proceedings of the 32nd International Conference on Neural Information Processing Systems}, +pages = {6639–6649}, +numpages = {11}, +location = {Montr\'{e}al, Canada}, +series = {NIPS'18} +} + + +@article{lightgbm, + title={Lightgbm: A highly efficient gradient boosting decision tree}, + author={Ke, Guolin and Meng, Qi and Finley, Thomas and Wang, Taifeng and Chen, Wei and Ma, Weidong and Ye, Qiwei and Liu, Tie-Yan}, + journal={Advances in neural information processing systems}, + volume={30}, + pages={3146--3154}, + year={2017} +} + +@inproceedings{xgboost, + author = {Chen, Tianqi and Guestrin, Carlos}, + title = {{XGBoost}: A Scalable Tree Boosting System}, + booktitle = {Proceedings of the 22nd ACM SIGKDD International Conference on Knowledge Discovery and Data Mining}, + series = {KDD '16}, + year = {2016}, + isbn = {978-1-4503-4232-2}, + location = {San Francisco, California, USA}, + pages = {785--794}, + numpages = {10}, + url = {http://doi.acm.org/10.1145/2939672.2939785}, + doi = {10.1145/2939672.2939785}, + acmid = {2939785}, + publisher = {ACM}, + address = {New York, NY, USA}, + keywords = {large-scale machine learning}, +} + +@article{stable-baselines3, + author = {Antonin Raffin and Ashley Hill and Adam Gleave and Anssi Kanervisto and Maximilian Ernestus and Noah Dormann}, + title = {Stable-Baselines3: Reliable Reinforcement Learning Implementations}, + journal = {Journal of Machine Learning Research}, + year = {2021}, + volume = {22}, + number = {268}, + pages = {1-8}, + url = {http://jmlr.org/papers/v22/20-1364.html} +} + +@misc{openai, + title={OpenAI Gym}, + author={Greg Brockman and Vicki Cheung and Ludwig Pettersson and Jonas Schneider and John Schulman and Jie Tang and Wojciech Zaremba}, + year={2016}, + eprint={1606.01540}, + archivePrefix={arXiv}, + primaryClass={cs.LG} +} + +@misc{tensorflow, +title={ {TensorFlow}: Large-Scale Machine Learning on Heterogeneous Systems}, +url={https://www.tensorflow.org/}, +note={Software available from tensorflow.org}, +author={ + Mart\'{i}n~Abadi and + Ashish~Agarwal and + Paul~Barham and + Eugene~Brevdo and + Zhifeng~Chen and + Craig~Citro and + Greg~S.~Corrado and + Andy~Davis and + Jeffrey~Dean and + Matthieu~Devin and + Sanjay~Ghemawat and + Ian~Goodfellow and + Andrew~Harp and + Geoffrey~Irving and + Michael~Isard and + Yangqing Jia and + Rafal~Jozefowicz and + Lukasz~Kaiser and + Manjunath~Kudlur and + Josh~Levenberg and + Dandelion~Man\'{e} and + Rajat~Monga and + Sherry~Moore and + Derek~Murray and + Chris~Olah and + Mike~Schuster and + Jonathon~Shlens and + Benoit~Steiner and + Ilya~Sutskever and + Kunal~Talwar and + Paul~Tucker and + Vincent~Vanhoucke and + Vijay~Vasudevan and + Fernanda~Vi\'{e}gas and + Oriol~Vinyals and + Pete~Warden and + Martin~Wattenberg and + Martin~Wicke and + Yuan~Yu and + Xiaoqiang~Zheng}, + year={2015}, +} + +@incollection{pytorch, +title = {PyTorch: An Imperative Style, High-Performance Deep Learning Library}, +author = {Paszke, Adam and Gross, Sam and Massa, Francisco and Lerer, Adam and Bradbury, James and Chanan, Gregory and Killeen, Trevor and Lin, Zeming and Gimelshein, Natalia and Antiga, Luca and Desmaison, Alban and Kopf, Andreas and Yang, Edward and DeVito, Zachary and Raison, Martin and Tejani, Alykhan and Chilamkurthy, Sasank and Steiner, Benoit and Fang, Lu and Bai, Junjie and Chintala, Soumith}, 
+booktitle = {Advances in Neural Information Processing Systems 32}, +editor = {H. Wallach and H. Larochelle and A. Beygelzimer and F. d\textquotesingle Alch\'{e}-Buc and E. Fox and R. Garnett}, +pages = {8024--8035}, +year = {2019}, +publisher = {Curran Associates, Inc.}, +url = {http://papers.neurips.cc/paper/9015-pytorch-an-imperative-style-high-performance-deep-learning-library.pdf} +} + +@ARTICLE{scipy, + author = {Virtanen, Pauli and Gommers, Ralf and Oliphant, Travis E. and + Haberland, Matt and Reddy, Tyler and Cournapeau, David and + Burovski, Evgeni and Peterson, Pearu and Weckesser, Warren and + Bright, Jonathan and {van der Walt}, St{\'e}fan J. and + Brett, Matthew and Wilson, Joshua and Millman, K. Jarrod and + Mayorov, Nikolay and Nelson, Andrew R. J. and Jones, Eric and + Kern, Robert and Larson, Eric and Carey, C J and + Polat, {\.I}lhan and Feng, Yu and Moore, Eric W. and + {VanderPlas}, Jake and Laxalde, Denis and Perktold, Josef and + Cimrman, Robert and Henriksen, Ian and Quintero, E. A. and + Harris, Charles R. and Archibald, Anne M. and + Ribeiro, Ant{\^o}nio H. and Pedregosa, Fabian and + {van Mulbregt}, Paul and {SciPy 1.0 Contributors}}, + title = {{{SciPy} 1.0: Fundamental Algorithms for Scientific + Computing in Python}}, + journal = {Nature Methods}, + year = {2020}, + volume = {17}, + pages = {261--272}, + adsurl = {https://rdcu.be/b08Wh}, + doi = {10.1038/s41592-019-0686-2}, +} + +@Article{numpy, + title = {Array programming with {NumPy}}, + author = {Charles R. Harris and K. Jarrod Millman and St{\'{e}}fan J. + van der Walt and Ralf Gommers and Pauli Virtanen and David + Cournapeau and Eric Wieser and Julian Taylor and Sebastian + Berg and Nathaniel J. Smith and Robert Kern and Matti Picus + and Stephan Hoyer and Marten H. van Kerkwijk and Matthew + Brett and Allan Haldane and Jaime Fern{\'{a}}ndez del + R{\'{i}}o and Mark Wiebe and Pearu Peterson and Pierre + G{\'{e}}rard-Marchant and Kevin Sheppard and Tyler Reddy and + Warren Weckesser and Hameer Abbasi and Christoph Gohlke and + Travis E. Oliphant}, + year = {2020}, + month = sep, + journal = {Nature}, + volume = {585}, + number = {7825}, + pages = {357--362}, + doi = {10.1038/s41586-020-2649-2}, + publisher = {Springer Science and Business Media {LLC}}, + url = {https://doi.org/10.1038/s41586-020-2649-2} +} + + @inproceedings{pandas, + title={Data structures for statistical computing in python}, + author={McKinney, Wes and others}, + booktitle={Proceedings of the 9th Python in Science Conference}, + volume={445}, + pages={51--56}, + year={2010}, + organization={Austin, TX} +} + + + +@online{finrl, + title = {AI4Finance-Foundation}, + year = 2022, + url = {https://github.com/AI4Finance-Foundation/FinRL}, + urldate = {2022-09-30} +} + + +@online{tensortrade, + title = {tensortrade}, + year = 2022, + url = {https://tensortradex.readthedocs.io/en/latest/L}, + urldate = {2022-09-30} +} \ No newline at end of file diff --git a/docs/JOSS_paper/paper.jats b/docs/JOSS_paper/paper.jats new file mode 100644 index 000000000..575c6717e --- /dev/null +++ b/docs/JOSS_paper/paper.jats @@ -0,0 +1,941 @@ + + +
+ + + + +Journal of Open Source Software +JOSS + +2475-9066 + +Open Journals + + + +0 +N/A + +FreqAI: generalizing adaptive +modeling for chaotic time-series market forecasts + + + +0000-0001-5618-8629 + +Ph.D +Robert A. Caulk + + + + + +0000-0003-3289-8604 + +Ph.D +Elin Törnquist + + + + + + +Voppichler +Matthias + + + + + +Lawless +Andrew R. + + + + + +McMullan +Ryan + + + + + +Santos +Wagner Costa + + + + + + +Pogue +Timothy C. + + + + + + +van der Vlugt +Johan + + + + + +Gehring +Stefan P. + + + + + +Schmidt +Pascal + + + + + +Emergent Methods LLC, Arvada Colorado, 80005, +USA + + + + +Freqtrade open source project + + + +¿VOL? +¿ISSUE? +¿PAGE? + +Authors of papers retain copyright and release the +work under a Creative Commons Attribution 4.0 International License (CC +BY 4.0) +2022 +The article authors + +Authors of papers retain copyright and release the work under +a Creative Commons Attribution 4.0 International License (CC BY +4.0) + + + +Python +Machine Learning +adaptive modeling +chaotic systems +time-series forecasting + + + + + + Statement of need +

Forecasting chaotic time-series based systems, such as equity/cryptocurrency markets, requires a broad set of tools geared toward testing a wide range of hypotheses. Fortunately, a recent maturation of robust machine learning libraries (e.g. scikit-learn) has opened up a wide range of research possibilities. Scientists from a diverse range of fields can now easily prototype their studies on an abundance of established machine learning algorithms. Similarly, these user-friendly libraries enable “citizen scientists” to use their basic Python skills for data exploration. However, leveraging these machine learning libraries on historical and live chaotic data sources can be logistically difficult and expensive. Additionally, robust data collection, storage, and handling present a disparate challenge. FreqAI aims to provide a generalized and extensible open-sourced framework geared toward live deployments of adaptive modeling for market forecasting. The FreqAI framework is effectively a sandbox for the rich world of open-source machine learning libraries. Inside the FreqAI sandbox, users find they can combine a wide variety of third-party libraries to test creative hypotheses on a free live 24/7 chaotic data source: cryptocurrency exchange data.

+
+ + Summary +

FreqAI + evolved from a desire to test and compare a range of adaptive + time-series forecasting methods on chaotic data. Cryptocurrency + markets provide a unique data source since they are operational 24/7 + and the data is freely available. Luckily, an existing open-source + software, + Freqtrade, + had already matured under a range of talented developers to support + robust data collection/storage, as well as robust live environmental + interactions for standard algorithmic trading. + Freqtrade also provides a set of data + analysis/visualization tools for the evaluation of historical + performance as well as live environmental feedback. + FreqAI builds on top of + Freqtrade to include a user-friendly well + tested interface for integrating external machine learning libraries + for adaptive time-series forecasting. Beyond enabling the integration + of existing libraries, FreqAI hosts a range of + custom algorithms and methodologies aimed at improving computational + and predictive performances. Thus, FreqAI + contains a range of unique features which can be easily tested in + combination with all the existing Python-accessible machine learning + libraries to generate novel research on live and historical data.

+

The high-level overview of the software is depicted in Figure + 1.

+

freqai-algo + Abstracted overview of FreqAI algorithm

+ + Connecting machine learning libraries +

Although the FreqAI framework is designed + to accommodate any Python library in the “Model training” and + “Feature set engineering” portions of the software (Figure 1), it + already boasts a wide range of well documented examples based on + various combinations of:

+ + +

scikit-learn + (Pedregosa + et al., 2011), Catboost + (Prokhorenkova + et al., 2018), LightGBM + (Ke + et al., 2017), XGBoost + (Chen + & Guestrin, 2016), stable_baselines3 + (Raffin + et al., 2021), openai gym + (Brockman + et al., 2016), tensorflow + (Abadi + et al., 2015), pytorch + (Paszke + et al., 2019), Scipy + (Virtanen + et al., 2020), Numpy + (Harris + et al., 2020), and pandas + (McKinney + & others, 2010).

+
+
+

These mature projects contain a wide range of peer-reviewed and + industry standard methods, including:

+ + +

Regression, Classification, Neural Networks, Reinforcement + Learning, Support Vector Machines, Principal Component Analysis, + point clustering, and much more.

+
+
+

which are all leveraged in FreqAI for + users to use as templates or extend with their own methods.

+
+ + Furnishing novel methods and features +

Beyond the industry standard methods available through external libraries, FreqAI includes novel methods which are not available anywhere else in the open-source (or scientific) world. For example, FreqAI provides:

+ + +

a custom algorithm/methodology for adaptive modeling

+
+ +

rapid and self-monitored feature engineering tools

+
+ +

unique model features/indicators

+
+ +

optimized data collection algorithms

+
+ +

safely integrated outlier detection methods

+
+ +

websocket communicated forecasts

+
+
+

Of particular interest for researchers, + FreqAI provides the option of large scale + experimentation via an optimized websocket communications + interface.

+
+ + Optimizing the back-end +

FreqAI aims to make it simple for users to + combine all the above tools to run studies based in two distinct + modules:

+ + +

backtesting studies

+
+ +

live-deployments

+
+
+

Both of these modules and their respective data management + systems are built on top of + Freqtrade, + a mature and actively developed cryptocurrency trading software. + This means that FreqAI benefits from a wide + range of tangential/disparate feature developments such as:

+ + +

FreqUI, a graphical interface for backtesting and live + monitoring

+
+ +

telegram control

+
+ +

robust database handling

+
+ +

futures/leverage trading

+
+ +

dollar cost averaging

+
+ +

trading strategy handling

+
+ +

a variety of free data sources via CCXT (FTX, Binance, Kucoin + etc.)

+
+
+

These features derive from a strong external developer community that shares in the benefit and stability of a communal CI (Continuous Integration) system. Beyond the developer community, FreqAI benefits strongly from the user base of Freqtrade, where most FreqAI beta-testers/developers originated. This symbiotic relationship between Freqtrade and FreqAI ignited a thoroughly tested beta, which demanded a four-month beta period and comprehensive documentation containing:

+ + +

numerous example scripts

+
+ +

a full parameter table

+
+ +

methodological descriptions

+
+ +

high-resolution diagrams/figures

+
+ +

detailed parameter setting recommendations

+
+
+
+ + Providing a reproducible foundation for researchers +

FreqAI provides an extensible, robust framework for researchers and citizen data scientists. The FreqAI sandbox enables rapid conception and testing of exotic hypotheses. From a research perspective, FreqAI handles the multitude of logistics associated with live deployments, historical backtesting, and feature engineering. With FreqAI, researchers can focus on their primary interests of feature engineering and hypothesis testing rather than figuring out how to collect and handle data. Further, the well-maintained and easily installed open-source framework of FreqAI enables reproducible scientific studies. This reproducibility component is essential to general scientific advancement in time-series forecasting for chaotic systems.

+
+
+ + Technical details +

Typical users configure FreqAI via two + files:

+ + +

A configuration file + (--config) which provides access to the + full parameter list available + here:

+
+
+ + +

control high-level feature engineering

+
+ +

customize adaptive modeling techniques

+
+ +

set any model training parameters available in third-party + libraries

+
+ +

manage adaptive modeling parameters (retrain frequency, + training window size, continual learning, etc.)

+
+
+ + + +

A strategy file (--strategy) where + users:

+
+
+ + +

list the base training features

+
+ +

set standard technical-analysis strategies

+
+ +

control trade entry/exit criteria

+
+
+

With these two files, most users can exploit a wide range of + pre-existing integrations in Catboost and 7 + other libraries with a simple command:

+ freqtrade trade --config config_freqai.example.json --strategy FreqaiExampleStrategy --freqaimodel CatboostRegressor +
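For illustration, the sketch below shows how a strategy file might declare its base training features and prediction targets. It is a minimal, hypothetical fragment: the populate_any_indicators hook and the %/& column-prefix conventions follow the FreqAI documentation at the time of writing, and exact method names, signatures, and indicator choices may differ between versions.

```python
# Hypothetical fragment of a FreqAI strategy file. The hook name, its signature,
# and the "%"/"&" prefix conventions are assumptions based on the FreqAI docs
# (circa 2022) and may differ in other Freqtrade/FreqAI versions.
import talib.abstract as ta
from pandas import DataFrame

from freqtrade.strategy import IStrategy


class MyFreqaiStrategy(IStrategy):
    timeframe = "1h"

    def populate_any_indicators(self, pair: str, df: DataFrame, tf: str,
                                informative: DataFrame = None,
                                set_generalized_indicators: bool = False) -> DataFrame:
        # Columns prefixed with "%" are collected by FreqAI as training features.
        df["%-rsi"] = ta.RSI(df, timeperiod=14)
        df["%-relative_volume"] = df["volume"] / df["volume"].rolling(24).mean()
        # Columns prefixed with "&" are treated as prediction targets (labels).
        df["&-future_close_change"] = (
            df["close"].shift(-12).rolling(12).mean() / df["close"] - 1
        )
        return df
```

Trade entry/exit criteria can then reference the prediction columns that FreqAI returns to the strategy in the standard Freqtrade entry/exit callbacks.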

Advanced users will edit one of the existing --freqaimodel files, which are simply children of the IFreqaiModel class (details below). Within these files, advanced users can customize training procedures, prediction procedures, outlier detection methods, data preparation, data saving methods, etc. All of this is configurable, so they can customize as little or as much as they want. This flexible customization is owed to the foundational architecture in FreqAI, which is composed of three distinct Python objects:

+ + +

IFreqaiModel

+ + +

A singular long-lived object containing all the necessary logic to collect data, store data, process data, engineer features, run training, and run model inference.

+
+
+
+ +

FreqaiDataKitchen

+ + +

A short-lived object which is uniquely created for each + asset/model. Beyond metadata, it also contains a variety of + data processing tools.

+
+
+
+ +

FreqaiDataDrawer

+ + +

Singular long-lived object containing all the historical + predictions, models, and save/load methods.

+
+
+
+
+

These objects interact with one another with one goal in mind: to provide a clean data set to machine learning experts/enthusiasts at the user endpoint. These power-users interact with an inherited IFreqaiModel that allows them to dig as deep or as shallow as they wish into the inheritance tree. Typical power-users focus their efforts on customizing training procedures and testing exotic functionalities available in third-party libraries. Thus, power-users are freed from the algorithmic weight associated with data management, and can instead focus their energy on testing creative hypotheses. Meanwhile, some users choose to override deeper functionalities within IFreqaiModel to help them craft unique data structures and training procedures.
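As a concrete (though hypothetical) sketch of this division of labor, a user-supplied prediction model typically overrides only the training step, while IFreqaiModel, FreqaiDataKitchen, and FreqaiDataDrawer handle the surrounding data logistics. The base-class import path, fit signature, and data_dictionary keys below are assumptions modeled on the shipped regressors and may differ between versions.

```python
# Illustrative sketch of a user-defined prediction model. The import path, method
# signature, and dictionary keys are assumptions based on the shipped FreqAI
# regressors (circa 2022) and may vary between versions.
from typing import Any, Dict

from sklearn.linear_model import Ridge

from freqtrade.freqai.base_models.BaseRegressionModel import BaseRegressionModel


class RidgeRegressor(BaseRegressionModel):
    """Hypothetical user model: swap the default boosted trees for a linear model."""

    def fit(self, data_dictionary: Dict, dk: Any = None, **kwargs) -> Any:
        # FreqAI hands the model an already cleaned and split data set; the user
        # only decides what to train on it.
        X = data_dictionary["train_features"]
        y = data_dictionary["train_labels"]
        model = Ridge(**self.model_training_parameters)
        model.fit(X, y)
        return model
```

Such a class would then be selected with --freqaimodel RidgeRegressor, assuming the file is placed in the user model directory that FreqAI scans.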

+

The class structure and algorithmic details are depicted in the + following diagram:

+

image + Class diagram summarizing object interactions in + FreqAI

+
+ + Online documentation +

The documentation for + FreqAI + is available online at + https://www.freqtrade.io/en/latest/freqai/ + and covers a wide range of materials:

+ + +

Quick-start with a single command and example files - + (beginners)

+
+ +

Introduction to the feature engineering interface and basic + configurations - (intermediate users)

+
+ +

Parameter table with in-depth descriptions and default parameter setting recommendations - (intermediate users)

+
+ +

Data analysis and post-processing - (advanced users)

+
+ +

Methodological considerations complemented by high resolution + figures - (advanced users)

+
+ +

Instructions for integrating third party machine learning + libraries into custom prediction models - (advanced users)

+
+ +

Software architectural description with class diagram - + (developers)

+
+ +

File structure descriptions - (developers)

+
+
+

The docs direct users to a variety of pre-made examples which + integrate Catboost, + LightGBM, XGBoost, + Sklearn, + stable_baselines3, + torch, tensorflow. + Meanwhile, developers will also find thorough docstrings and type + hinting throughout the source code to aid in code readability and + customization.

+

FreqAI also benefits from a strong support network of users and developers on the Freqtrade Discord as well as on the FreqAI Discord. Within the FreqAI Discord, users will find a deep and easily searched knowledge base containing common errors. But more importantly, users in the FreqAI Discord share anecdotal and quantitative observations which compare performance between various third-party libraries and methods.

+
+ + State of the field +

There are two other open-source tools which are geared toward helping users build models for time-series forecasts on market-based data. However, each of these tools suffers from a non-generalized framework that does not permit comparison of methods and libraries. Additionally, they do not permit easy live-deployments or adaptive-modeling methods. For example, two open-sourced projects called tensortrade (Tensortrade, 2022) and FinRL (AI4Finance-Foundation, 2022) limit users to the exploration of reinforcement learning on historical data. These tools also do not provide robust live deployments, do not furnish novel feature engineering algorithms, and do not provide custom data analysis tools. FreqAI fills the gap.

+
+ + On-going research +

Emergent Methods, based in Arvada, CO, is actively using FreqAI to perform large-scale experiments aimed at comparing machine learning libraries in live and historical environments. Past projects include backtesting parametric sweeps, while active projects include a three-week live deployment comparison between CatboostRegressor, LightGBMRegressor, and XGBoostRegressor. Results from these studies are on track for publication in scientific journals as well as more general data science blogs (e.g. Medium).

+
+ + Installing and running <monospace>FreqAI</monospace> +

FreqAI is automatically installed with Freqtrade using the following commands on Linux systems:

+ git clone git@github.com:freqtrade/freqtrade.git +cd freqtrade +./setup.sh -i +

However, FreqAI also benefits from + Freqtrade docker distributions, and can be run + with docker by pulling the stable or develop images from + Freqtrade distributions.

+
+ + Funding sources +

FreqAI + has had no official sponsors, and is entirely grass roots. All + donations into the project (e.g. the GitHub sponsor system) are kept + inside the project to help support development of open-sourced and + communally beneficial features.

+
+ + Acknowledgements +

We would like to acknowledge various beta testers of + FreqAI:

+ + +

Richárd Józsa

+
+ +

Juha Nykänen

+
+ +

Salah Lamkadem

+
+
+

As well as various Freqtrade + developers + maintaining tangential, yet essential, modules.

+
+ + + + + + + PedregosaF. + VaroquauxG. + GramfortA. + MichelV. + ThirionB. + GriselO. + BlondelM. + PrettenhoferP. + WeissR. + DubourgV. + VanderplasJ. + PassosA. + CournapeauD. + BrucherM. + PerrotM. + DuchesnayE. + + Scikit-learn: Machine learning in Python + Journal of Machine Learning Research + 2011 + 12 + 2825 + 2830 + + + + + + ProkhorenkovaLiudmila + GusevGleb + VorobevAleksandr + DorogushAnna Veronika + GulinAndrey + + CatBoost: Unbiased boosting with categorical features + Proceedings of the 32nd international conference on neural information processing systems + Curran Associates Inc. + Red Hook, NY, USA + 2018 + 6639 + 6649 + + + + + + KeGuolin + MengQi + FinleyThomas + WangTaifeng + ChenWei + MaWeidong + YeQiwei + LiuTie-Yan + + Lightgbm: A highly efficient gradient boosting decision tree + Advances in neural information processing systems + 2017 + 30 + 3146 + 3154 + + + + + + ChenTianqi + GuestrinCarlos + + XGBoost: A scalable tree boosting system + Proceedings of the 22nd ACM SIGKDD international conference on knowledge discovery and data mining + ACM + New York, NY, USA + 2016 + 978-1-4503-4232-2 + http://doi.acm.org/10.1145/2939672.2939785 + 10.1145/2939672.2939785 + 785 + 794 + + + + + + RaffinAntonin + HillAshley + GleaveAdam + KanervistoAnssi + ErnestusMaximilian + DormannNoah + + Stable-Baselines3: Reliable reinforcement learning implementations + Journal of Machine Learning Research + 2021 + 22 + 268 + http://jmlr.org/papers/v22/20-1364.html + 1 + 8 + + + + + + BrockmanGreg + CheungVicki + PetterssonLudwig + SchneiderJonas + SchulmanJohn + TangJie + ZarembaWojciech + + OpenAI gym + 2016 + https://arxiv.org/abs/1606.01540 + + + + + + AbadiMartín + AgarwalAshish + BarhamPaul + BrevdoEugene + ChenZhifeng + CitroCraig + CorradoGreg S. + DavisAndy + DeanJeffrey + DevinMatthieu + GhemawatSanjay + GoodfellowIan + HarpAndrew + IrvingGeoffrey + IsardMichael + JiaYangqing + JozefowiczRafal + KaiserLukasz + KudlurManjunath + LevenbergJosh + ManéDandelion + MongaRajat + MooreSherry + MurrayDerek + OlahChris + SchusterMike + ShlensJonathon + SteinerBenoit + SutskeverIlya + TalwarKunal + TuckerPaul + VanhouckeVincent + VasudevanVijay + ViégasFernanda + VinyalsOriol + WardenPete + WattenbergMartin + WickeMartin + YuYuan + ZhengXiaoqiang + + TensorFlow: Large-scale machine learning on heterogeneous systems + 2015 + https://www.tensorflow.org/ + + + + + + PaszkeAdam + GrossSam + MassaFrancisco + LererAdam + BradburyJames + ChananGregory + KilleenTrevor + LinZeming + GimelsheinNatalia + AntigaLuca + DesmaisonAlban + KopfAndreas + YangEdward + DeVitoZachary + RaisonMartin + TejaniAlykhan + ChilamkurthySasank + SteinerBenoit + FangLu + BaiJunjie + ChintalaSoumith + + PyTorch: An imperative style, high-performance deep learning library + Advances in neural information processing systems 32 + + WallachH. + LarochelleH. + BeygelzimerA. + dAlché-BucF. + FoxE. + GarnettR. + + Curran Associates, Inc. + 2019 + http://papers.neurips.cc/paper/9015-pytorch-an-imperative-style-high-performance-deep-learning-library.pdf + 8024 + 8035 + + + + + + VirtanenPauli + GommersRalf + OliphantTravis E. + HaberlandMatt + ReddyTyler + CournapeauDavid + BurovskiEvgeni + PetersonPearu + WeckesserWarren + BrightJonathan + van der WaltStéfan J. + BrettMatthew + WilsonJoshua + MillmanK. Jarrod + MayorovNikolay + NelsonAndrew R. J. + JonesEric + KernRobert + LarsonEric + CareyC J + Polatİlhan + FengYu + MooreEric W. + VanderPlasJake + LaxaldeDenis + PerktoldJosef + CimrmanRobert + HenriksenIan + QuinteroE. A. 
+ HarrisCharles R. + ArchibaldAnne M. + RibeiroAntônio H. + PedregosaFabian + van MulbregtPaul + SciPy 1.0 Contributors + + SciPy 1.0: Fundamental Algorithms for Scientific Computing in Python + Nature Methods + 2020 + 17 + 10.1038/s41592-019-0686-2 + 261 + 272 + + + + + + HarrisCharles R. + MillmanK. Jarrod + WaltStéfan J. van der + GommersRalf + VirtanenPauli + CournapeauDavid + WieserEric + TaylorJulian + BergSebastian + SmithNathaniel J. + KernRobert + PicusMatti + HoyerStephan + KerkwijkMarten H. van + BrettMatthew + HaldaneAllan + RíoJaime Fernández del + WiebeMark + PetersonPearu + Gérard-MarchantPierre + SheppardKevin + ReddyTyler + WeckesserWarren + AbbasiHameer + GohlkeChristoph + OliphantTravis E. + + Array programming with NumPy + Nature + Springer Science; Business Media LLC + 202009 + 585 + 7825 + https://doi.org/10.1038/s41586-020-2649-2 + 10.1038/s41586-020-2649-2 + 357 + 362 + + + + + + McKinneyWes + others + + Data structures for statistical computing in python + Proceedings of the 9th python in science conference + Austin, TX + 2010 + 445 + 51 + 56 + + + + + AI4Finance-foundation + 2022 + 20220930 + https://github.com/AI4Finance-Foundation/FinRL + + + + + Tensortrade + 2022 + 20220930 + https://tensortradex.readthedocs.io/en/latest/L + + + + +
diff --git a/docs/JOSS_paper/paper.md b/docs/JOSS_paper/paper.md
new file mode 100644
index 000000000..bfe989dde
--- /dev/null
+++ b/docs/JOSS_paper/paper.md
@@ -0,0 +1,210 @@
+---
+title: '`FreqAI`: generalizing adaptive modeling for chaotic time-series market forecasts'
+tags:
+  - Python
+  - Machine Learning
+  - adaptive modeling
+  - chaotic systems
+  - time-series forecasting
+authors:
+  - name: Robert A. Caulk Ph.D
+    orcid: 0000-0001-5618-8629
+    affiliation: 1, 2
+  - name: Elin Törnquist Ph.D
+    orcid: 0000-0003-3289-8604
+    affiliation: 1, 2
+  - name: Matthias Voppichler
+    orcid:
+    affiliation: 2
+  - name: Andrew R. Lawless
+    orcid:
+    affiliation: 2
+  - name: Ryan McMullan
+    orcid:
+    affiliation: 2
+  - name: Wagner Costa Santos
+    orcid:
+    affiliation: 1, 2
+  - name: Timothy C. Pogue
+    orcid:
+    affiliation: 1, 2
+  - name: Johan van der Vlugt
+    orcid:
+    affiliation: 2
+  - name: Stefan P. Gehring
+    orcid:
+    affiliation: 2
+  - name: Pascal Schmidt
+    orcid:
+    affiliation: 2
+
+
+affiliations:
+  - name: Emergent Methods LLC, Arvada Colorado, 80005, USA
+    index: 1
+  - name: Freqtrade open source project
+    index: 2
+date: October 2022
+bibliography: paper.bib
+
+
+---
+
+
+# Statement of need
+
+Forecasting chaotic time-series based systems, such as equity/cryptocurrency markets, requires a broad set of tools geared toward testing a wide range of hypotheses. Fortunately, a recent maturation of robust machine learning libraries (e.g. `scikit-learn`) has opened up a wide range of research possibilities. Scientists from a diverse range of fields can now easily prototype their studies on an abundance of established machine learning algorithms. Similarly, these user-friendly libraries enable "citizen scientists" to use their basic Python skills for data exploration. However, leveraging these machine learning libraries on historical and live chaotic data sources can be logistically difficult and expensive. Additionally, robust data collection, storage, and handling present a disparate challenge. [`FreqAI`](https://www.freqtrade.io/en/latest/freqai/) aims to provide a generalized and extensible open-sourced framework geared toward live deployments of adaptive modeling for market forecasting. The `FreqAI` framework is effectively a sandbox for the rich world of open-source machine learning libraries. Inside the `FreqAI` sandbox, users find they can combine a wide variety of third-party libraries to test creative hypotheses on a free live 24/7 chaotic data source: cryptocurrency exchange data.
+
+
+# Summary
+
+[`FreqAI`](https://www.freqtrade.io/en/latest/freqai/) evolved from a desire to test and compare a range of adaptive time-series forecasting methods on chaotic data. Cryptocurrency markets provide a unique data source since they are operational 24/7 and the data is freely available. Luckily, an existing open-source software, [`Freqtrade`](https://www.freqtrade.io/en/stable/), had already matured under a range of talented developers to support robust data collection/storage, as well as robust live environmental interactions for standard algorithmic trading. `Freqtrade` also provides a set of data analysis/visualization tools for the evaluation of historical performance as well as live environmental feedback. `FreqAI` builds on top of `Freqtrade` to include a user-friendly, well-tested interface for integrating external machine learning libraries for adaptive time-series forecasting.
+Beyond enabling the integration of existing libraries, `FreqAI` hosts a range of custom algorithms and methodologies aimed at improving computational and predictive performance. Thus, `FreqAI` contains a range of unique features which can be easily tested in combination with all the existing Python-accessible machine learning libraries to generate novel research on live and historical data.
+
+The high-level overview of the software is depicted in Figure 1.
+
+![freqai-algo](assets/freqai_algo.jpg)
+*Abstracted overview of FreqAI algorithm*
+
+## Connecting machine learning libraries
+
+Although the `FreqAI` framework is designed to accommodate any Python library in the "Model training" and "Feature set engineering" portions of the software (Figure 1), it already boasts a wide range of well-documented examples based on various combinations of:
+
+* scikit-learn [@scikit-learn], Catboost [@catboost], LightGBM [@lightgbm], XGBoost [@xgboost], stable_baselines3 [@stable-baselines3], openai gym [@openai], tensorflow [@tensorflow], pytorch [@pytorch], Scipy [@scipy], Numpy [@numpy], and pandas [@pandas].
+
+These mature projects contain a wide range of peer-reviewed and industry standard methods, including:
+
+* Regression, Classification, Neural Networks, Reinforcement Learning, Support Vector Machines, Principal Component Analysis, point clustering, and much more.
+
+which are all leveraged in `FreqAI` for users to use as templates or extend with their own methods.
+
+## Furnishing novel methods and features
+
+Beyond the industry standard methods available through external libraries, `FreqAI` includes novel methods which are not available anywhere else in the open-source (or scientific) world. For example, `FreqAI` provides:
+
+* a custom algorithm/methodology for adaptive modeling
+* rapid and self-monitored feature engineering tools
+* unique model features/indicators
+* optimized data collection algorithms
+* safely integrated outlier detection methods
+* websocket-communicated forecasts
+
+Of particular interest for researchers, `FreqAI` provides the option of large-scale experimentation via an optimized websocket communications interface.
+
+## Optimizing the back-end
+
+`FreqAI` aims to make it simple for users to combine all the above tools to run studies in two distinct modules:
+
+* backtesting studies
+* live-deployments
+
+Both of these modules and their respective data management systems are built on top of [`Freqtrade`](https://www.freqtrade.io/en/latest/), a mature and actively developed cryptocurrency trading software. This means that `FreqAI` benefits from a wide range of tangential/disparate feature developments such as:
+
+* FreqUI, a graphical interface for backtesting and live monitoring
+* Telegram control
+* robust database handling
+* futures/leverage trading
+* dollar-cost averaging
+* trading strategy handling
+* a variety of free data sources via CCXT (FTX, Binance, Kucoin etc.)
+
+These features derive from a strong external developer community that shares in the benefit and stability of a communal CI (Continuous Integration) system. Beyond the developer community, `FreqAI` benefits strongly from the user base of `Freqtrade`, where most `FreqAI` beta-testers/developers originated.
+This symbiotic relationship between `Freqtrade` and `FreqAI` ignited a thoroughly tested [`beta`](https://github.com/freqtrade/freqtrade/pull/6832), which demanded a four-month beta period and [comprehensive documentation](https://www.freqtrade.io/en/latest/freqai/) containing:
+
+* numerous example scripts
+* a full parameter table
+* methodological descriptions
+* high-resolution diagrams/figures
+* detailed parameter setting recommendations
+
+## Providing a reproducible foundation for researchers
+
+`FreqAI` provides an extensible, robust framework for researchers and citizen data scientists. The `FreqAI` sandbox enables rapid conception and testing of exotic hypotheses. From a research perspective, `FreqAI` handles the multitude of logistics associated with live deployments, historical backtesting, and feature engineering. With `FreqAI`, researchers can focus on their primary interests of feature engineering and hypothesis testing rather than figuring out how to collect and handle data. Further, the well-maintained and easily installed open-source framework of `FreqAI` enables reproducible scientific studies. This reproducibility component is essential to general scientific advancement in time-series forecasting for chaotic systems.
+
+# Technical details
+
+Typical users configure `FreqAI` via two files:
+
+1. A `configuration` file (`--config`) which provides access to the full parameter list available [here](https://www.freqtrade.io/en/latest/freqai/), where users:
+    * control high-level feature engineering
+    * customize adaptive modeling techniques
+    * set any model training parameters available in third-party libraries
+    * manage adaptive modeling parameters (retrain frequency, training window size, continual learning, etc.)
+
+2. A strategy file (`--strategy`) where users:
+    * list the base training features
+    * set standard technical-analysis strategies
+    * control trade entry/exit criteria
+
+With these two files, most users can exploit a wide range of pre-existing integrations in `Catboost` and 7 other libraries with a simple command:
+
+```
+freqtrade trade --config config_freqai.example.json --strategy FreqaiExampleStrategy --freqaimodel CatboostRegressor
+```
+
+Advanced users will edit one of the existing `--freqaimodel` files, which are simply children of the `IFreqaiModel` class (details below). Within these files, advanced users can customize training procedures, prediction procedures, outlier detection methods, data preparation, data saving methods, etc. All of this is configurable, so they can customize as little or as much as they want. This flexible customization is owed to the foundational architecture in `FreqAI`, which is composed of three distinct Python objects:
+
+* `IFreqaiModel`
+    * A singular long-lived object containing all the necessary logic to collect data, store data, process data, engineer features, run training, and run model inference.
+* `FreqaiDataKitchen`
+    * A short-lived object which is uniquely created for each asset/model. Beyond metadata, it also contains a variety of data processing tools.
+* `FreqaiDataDrawer`
+    * Singular long-lived object containing all the historical predictions, models, and save/load methods.
+
+These objects interact with one another with one goal in mind: to provide a clean data set to machine learning experts/enthusiasts at the user endpoint. These power-users interact with an inherited `IFreqaiModel` that allows them to dig as deep or as shallow as they wish into the inheritance tree.
Typical power-users focus their efforts on customizing training procedures and testing exotic functionalities available in third-party libraries. Thus, power-users are freed from the algorithmic weight associated with data management, and can instead focus their energy on testing creative hypotheses. Meanwhile, some users choose to override deeper functionalities within `IFreqaiModel` to help them craft unique data structures and training procedures.
+
+The class structure and algorithmic details are depicted in the following diagram:
+
+![image](assets/freqai_algorithm-diagram.jpg)
+*Class diagram summarizing object interactions in FreqAI*
+
+# Online documentation
+
+The documentation for [`FreqAI`](https://www.freqtrade.io/en/latest/freqai/) is available online at [https://www.freqtrade.io/en/latest/freqai/](https://www.freqtrade.io/en/latest/freqai/) and covers a wide range of materials:
+
+* Quick-start with a single command and example files - (beginners)
+* Introduction to the feature engineering interface and basic configurations - (intermediate users)
+* Parameter table with in-depth descriptions and default parameter setting recommendations - (intermediate users)
+* Data analysis and post-processing - (advanced users)
+* Methodological considerations complemented by high-resolution figures - (advanced users)
+* Instructions for integrating third-party machine learning libraries into custom prediction models - (advanced users)
+* Software architectural description with class diagram - (developers)
+* File structure descriptions - (developers)
+
+The docs direct users to a variety of pre-made examples which integrate `Catboost`, `LightGBM`, `XGBoost`, `Sklearn`, `stable_baselines3`, `torch`, and `tensorflow`. Meanwhile, developers will also find thorough docstrings and type hinting throughout the source code to aid in code readability and customization.
+
+`FreqAI` also benefits from a strong support network of users and developers on the [`Freqtrade` Discord](https://discord.gg/w6nDM6cM4y) as well as on the [`FreqAI` Discord](https://discord.gg/xE4RMg4QYw). Within the `FreqAI` Discord, users will find a deep and easily searched knowledge base containing common errors. But more importantly, users in the `FreqAI` Discord share anecdotal and quantitative observations which compare performance between various third-party libraries and methods.
+
+# State of the field
+
+There are two other open-source tools which are geared toward helping users build models for time-series forecasts on market-based data. However, each of these tools suffers from a non-generalized framework that does not permit comparison of methods and libraries. Additionally, they do not permit easy live-deployments or adaptive-modeling methods. For example, two open-sourced projects called [`tensortrade`](https://tensortradex.readthedocs.io/en/latest/) [@tensortrade] and [`FinRL`](https://github.com/AI4Finance-Foundation/FinRL) [@finrl] limit users to the exploration of reinforcement learning on historical data. These tools also do not provide robust live deployments, do not furnish novel feature engineering algorithms, and do not provide custom data analysis tools. `FreqAI` fills the gap.
+
+# On-going research
+
+Emergent Methods, based in Arvada, CO, is actively using `FreqAI` to perform large-scale experiments aimed at comparing machine learning libraries in live and historical environments.
Past projects include backtesting parametric sweeps, while active projects include a three-week live deployment comparison between `CatboostRegressor`, `LightGBMRegressor`, and `XGBoostRegressor`. Results from these studies are on track for publication in scientific journals as well as more general data science blogs (e.g. Medium).
+
+# Installing and running `FreqAI`
+
+`FreqAI` is automatically installed with `Freqtrade` using the following commands on Linux systems:
+
+```
+git clone git@github.com:freqtrade/freqtrade.git
+cd freqtrade
+./setup.sh -i
+```
+
+However, `FreqAI` also benefits from `Freqtrade` Docker distributions, and can be run with Docker by pulling the stable or develop images from `Freqtrade` distributions.
+
+# Funding sources
+
+[`FreqAI`](https://www.freqtrade.io/en/latest/freqai/) has had no official sponsors, and is entirely grassroots. All donations into the project (e.g. the GitHub sponsor system) are kept inside the project to help support development of open-sourced and communally beneficial features.
+
+# Acknowledgements
+
+We would like to acknowledge various beta testers of `FreqAI`:
+
+- Richárd Józsa
+- Juha Nykänen
+- Salah Lamkadem
+
+As well as various `Freqtrade` [developers](https://github.com/freqtrade/freqtrade/graphs/contributors) maintaining tangential, yet essential, modules.
+
+# References
diff --git a/docs/JOSS_paper/paper.pdf b/docs/JOSS_paper/paper.pdf
new file mode 100644
index 000000000..7778617fd
Binary files /dev/null and b/docs/JOSS_paper/paper.pdf differ
diff --git a/mkdocs.yml b/mkdocs.yml
index 81f2b7b0b..9de6be353 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -31,6 +31,7 @@ nav: - Running FreqAI: freqai-running.md - Reinforcement Learning: freqai-reinforcement-learning.md - Developer guide: freqai-developers.md + - JOSS paper: paper.md - Short / Leverage: leverage.md - Utility Sub-commands: utils.md - Plotting: plotting.md