Skip to content

Commit

Permalink
1.1.40
Browse files Browse the repository at this point in the history
  • Loading branch information
SermetPekin committed Aug 6, 2024
1 parent d7aa4d3 commit ea990e4
Show file tree
Hide file tree
Showing 12 changed files with 243 additions and 67 deletions.
54 changes: 35 additions & 19 deletions Example_get_series_exp[2].ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -18,21 +18,20 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 17,
"id": "4d1d0463-6413-43cf-a785-5b5e44bc6a35",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Requesting... will be running now...\n",
" TP_DK_USD_A TP_DK_EUR_A TP_DK_CHF_A TP_DK_GBP_A TP_DK_JPY_A Tarih\n",
"0 NaN NaN NaN NaN NaN 01-01-2017\n",
"1 3.5192 3.7099 3.4454 4.3189 3.0025 02-01-2017\n",
"2 3.5338 3.7086 3.4522 4.3488 3.0065 03-01-2017\n",
"3 3.5737 3.7278 3.4744 4.3840 3.0220 04-01-2017\n",
"4 3.5764 3.7291 3.4751 4.3794 3.0292 05-01-2017\n"
"0 3.5192 3.7099 3.4454 4.3189 3.0025 02-01-2017\n",
"1 3.5338 3.7086 3.4522 4.3488 3.0065 03-01-2017\n",
"2 3.5737 3.7278 3.4744 4.3840 3.0220 04-01-2017\n",
"3 3.5764 3.7291 3.4751 4.3794 3.0292 05-01-2017\n",
"4 3.5934 3.7808 3.5184 4.4154 3.0814 06-01-2017\n"
]
}
],
Expand All @@ -47,13 +46,13 @@
"\n",
"\n",
"\"\"\"\n",
"df = get_series(index, start_date=\"01-01-2017\", end_date=\"31-12-2017\" )\n",
"df = get_series(index, cache=False , start_date=\"02-01-2017\", end_date=\"31-12-2017\" )\n",
"print(df.head())\n"
]
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 2,
"id": "d45692f1-8b16-46d4-bbbf-18f2cbf04ce3",
"metadata": {},
"outputs": [
Expand All @@ -62,11 +61,11 @@
"output_type": "stream",
"text": [
"<CacheFound>\n",
"Requesting... will be running now...\n",
"Requesting... will be running now...\n",
"Requesting... will be running now...\n",
"Requesting... will be running now...\n",
"Requesting... will be running now...\n",
"<CacheFound>\n",
"<CacheFound>\n",
"<CacheFound>\n",
"<CacheFound>\n",
"<CacheFound>\n",
" TP_DK_USD_A TP_DK_EUR_A TP_DK_CHF_A TP_DK_GBP_A TP_DK_JPY_A \\\n",
"0 NaN NaN NaN NaN NaN \n",
"1 3.5192 3.7099 3.4454 4.3189 3.0025 \n",
Expand Down Expand Up @@ -114,7 +113,7 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 3,
"id": "ab1cbb58-e489-4a49-a184-e059565ea992",
"metadata": {},
"outputs": [
Expand Down Expand Up @@ -167,7 +166,25 @@
},
{
"cell_type": "code",
"execution_count": 10,
"execution_count": 4,
"id": "d52fab7a-9e14-470d-aa44-eecb5e8c2501",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" writing file : [OutputFileName.xlsx] \n"
]
}
],
"source": [
"result.to_excel('OutputFileName.xlsx')"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "08caf97f-b9db-4f18-bdea-71107f6052ce",
"metadata": {},
"outputs": [
Expand Down Expand Up @@ -232,7 +249,7 @@
},
{
"cell_type": "code",
"execution_count": 11,
"execution_count": 6,
"id": "d3507317-8c47-4682-81eb-46528eb82b6f",
"metadata": {},
"outputs": [
Expand Down Expand Up @@ -282,7 +299,6 @@
"<CacheFound>\n",
"<CacheFound>\n",
"<CacheFound>\n",
"<CacheFound>\n",
" TP_DK_USD_A TP_DK_EUR_A TP_DK_CHF_A TP_DK_GBP_A TP_DK_JPY_A Tarih\n",
"0 3.691252 3.931128 3.666355 4.561512 3.236822 2017-Q1\n",
"1 3.578649 3.930415 3.620841 4.564195 3.214080 2017-Q2\n",
Expand All @@ -292,7 +308,7 @@
}
],
"source": [
"result = get_series_exp(index, aggregation='avg' , frequency = 'quarterly', start_date=\"01-01-2017\", end_date=\"31-12-2017\" )\n",
"result = get_series_exp(index, cache= False , aggregation='avg' , frequency = 'quarterly', start_date=\"25-01-2017\", end_date=\"31-12-2017\" )\n",
"print(result.data)"
]
}
Expand Down
Binary file modified docs/build/.DS_Store
Binary file not shown.
28 changes: 28 additions & 0 deletions docs/source/clear_cache.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@

clear_cache Function
=======================

The ``clear_cache`` function deletes the cache files that were saved inside the ``.cache/evdspy`` folder.

.. code-block:: python
from evdspy import clear_cache
clear_cache()
If the ``cache`` parameter is True, this function will save time by reusing previously saved content from the caches folder.
The default cache period is daily: a cached result is only used if the previous successful request was made on the current day.

.. code-block:: python
index = """
TP.DK.USD.A
TP.DK.EUR.A
TP.DK.CHF.A
TP.DK.GBP.A
TP.DK.JPY.A
"""
df = get_series(index, cache=True , start_date="01-01-2017", end_date="31-12-2017" )
print(df.head())
33 changes: 26 additions & 7 deletions docs/source/examples.rst
Original file line number Diff line number Diff line change
Expand Up @@ -27,14 +27,33 @@ Using multiple indexes and cache:

.. code-block:: python
indexes = ("TP.ODEMGZS.BDTTOPLAM", "TP.ODEMGZS.ABD")
df = get_series(indexes, start_date="01-01-2020", frequency="monthly", cache=True)
print(df.head())
from evdspy import get_series , get_series_exp
index = """
TP.DK.USD.A
TP.DK.EUR.A
TP.DK.CHF.A
TP.DK.GBP.A
TP.DK.JPY.A
"""
result = get_series_exp(index, start_date="01-01-2017", end_date="31-12-2017" )
print(result.data)
print(result.metadata)
Applying formulas and aggregation:
Set ``cache=True`` for efficient requests. The cache only reuses request results from the current day.

.. code-block:: python
template = "TP.ODEMGZS.BDTTOPLAM"
df = get_series(template, start_date="01-01-2020", formulas="level", aggregation="sum")
print(df.head())
from evdspy import get_series , get_series_exp
index = """
TP.DK.USD.A
TP.DK.EUR.A
TP.DK.CHF.A
TP.DK.GBP.A
TP.DK.JPY.A
"""
result = get_series_exp(index, cache = True , start_date="01-01-2017", end_date="31-12-2017" )
print(result.data)
30 changes: 30 additions & 0 deletions evdspy/EVDSlocal/common/clear_cache_folder.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
import os
import shutil
from pathlib import Path

def clear_cache_folder(cache_folder_path: Path) -> None:
    """Delete every file and subdirectory inside the evdspy cache folder.

    A safety guard refuses to touch any path that does not look like the
    evdspy cache folder, so the function cannot be pointed at arbitrary
    directories by mistake.

    Parameters
    ----------
    cache_folder_path : Path
        Path of the cache folder whose contents should be removed.
        The folder itself is kept; only its contents are deleted.
    """
    # Compare against the POSIX-style form of the path so the guard also
    # matches on Windows, where str(Path) renders '...\\.cache\\evdspy'
    # with backslashes and the plain substring test would always fail.
    if 'cache/evdspy' not in Path(cache_folder_path).as_posix():
        print('This does not look like cache folder. I can only delete content of cache folder of evdspy package')
        return
    if not os.path.exists(cache_folder_path):
        print(f"The folder {cache_folder_path} does not exist.")
        return

    for filename in os.listdir(cache_folder_path):
        file_path = os.path.join(cache_folder_path, filename)
        try:
            if os.path.isfile(file_path) or os.path.islink(file_path):
                os.unlink(file_path)  # regular file or symlink
            elif os.path.isdir(file_path):
                shutil.rmtree(file_path)  # whole subdirectory tree
        except OSError as e:
            # Best-effort: report the failure and keep deleting the rest.
            print(f"Failed to delete {file_path}. Reason: {e}")

    print(f"All files and subdirectories in [{cache_folder_path}] have been deleted.")


def clear_cache() -> None:
    """Clear the contents of the evdspy cache folder (~/.cache/evdspy).

    Convenience wrapper around :func:`clear_cache_folder` that supplies the
    default cache location in the user's home directory.
    """
    # Path is already imported at module level; no local import needed.
    cache_folder = Path.home() / ".cache" / "evdspy"
    clear_cache_folder(cache_folder)
40 changes: 22 additions & 18 deletions evdspy/EVDSlocal/index_requests/get_series_indexes_exp.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
default_start_date_fnc,
default_end_date_fnc,
)

from evdspy.EVDSlocal.index_requests.get_series_indexes import get_series

def initial_api_process_when_given(api_key: Optional[str] = None) -> None:
from evdspy.EVDSlocal.config.apikey_class import ApikeyClass
Expand Down Expand Up @@ -84,8 +84,8 @@ def get_series_exp(
aggregation: Union[
Literal["avg", "min", "max", "first", "last", "sum", None], None
] = None,
cache: bool = True,
meta_cache: bool = True,
cache: bool = False ,
meta_cache: bool = False ,
proxy: Optional[str] = None,
proxies: Optional[dict[str, str]] = None,
debug: bool = False,
Expand Down Expand Up @@ -154,23 +154,27 @@ def get_series_exp(
aggregation=aggregation,
cache=cache,
)
# ............ProxyManager................................
# # ............ProxyManager................................
proxy_manager = ProxyManager(proxy=proxy, proxies=proxies)
# ............UrlBuilder..................................
url_builder = UrlBuilder(config, url_type=None)
# ............ApiRequester................................
api_requester = ApiRequester(url_builder, proxy_manager)
if debug:
return api_requester.dry_request()
# ............DataProcessor................................
data_processor = DataProcessor(api_requester())

# Fetch the main data
main_data = data_processor()

# Fetch metadata for each index
# # ............UrlBuilder..................................
# url_builder = UrlBuilder(config, url_type=None)
# # ............ApiRequester................................
# api_requester = ApiRequester(url_builder, proxy_manager)
# if debug:
# return api_requester.dry_request()
# # ............DataProcessor................................
# data_processor = DataProcessor(api_requester())

# main_data = data_processor()
main_data = get_series(index ,
start_date=start_date,
end_date=end_date,
frequency=frequency,
formulas=formulas,
aggregation=aggregation,
cache=cache )

metadata_: pd.DataFrame = get_metadata_for_index(index, proxy_manager, cache=meta_cache)
# return pd.DataFrame(liste)

result = {
"main_data": main_data,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,9 +47,10 @@ def test_cache_or_raw_fnc(capsys):


# ..................................................................................
cache_ = MyCache().cache

def create_cache_version(fnc: Callable):
@MyCache().cache
@cache_
def fnc_cache(*args, **kw):
return fnc(*args, **kw)

Expand Down
Loading

0 comments on commit ea990e4

Please sign in to comment.