remove preceding nan data for uber method
realiti4 committed Nov 29, 2024
1 parent 991cd7e commit bdd7780
Showing 3 changed files with 33 additions and 48 deletions.
29 changes: 29 additions & 0 deletions pyproject.toml
@@ -0,0 +1,29 @@
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "tradingfeatures"
version = "0.8.0"
description = "A useful tool to download market history from popular exchanges."
readme = { file = "README.md", content-type = "text/markdown" }
requires-python = ">=3.11"
keywords = ["download", "market", "history", "binance", "bitfinex", "bitmex", "bitstamp"]
license = { text = "MIT License" }
authors = [
{ name = "Onur Cetinkol", email = "realiti44@gmail.com" }
]
classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent"
]
dependencies = [
"numpy",
"pandas",
"requests",
"tqdm"
]

[tool.setuptools.packages.find]
exclude = ["test", "test.*", "examples", "examples.*", "docs", "docs.*"]
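The new pyproject.toml carries the metadata that setup.py used to define. As a quick sanity check (a hedged sketch; it assumes the package has been installed from this checkout, e.g. with pip install .), the declared version and summary can be read back through importlib.metadata:

import importlib.metadata as md

# These values come from the [project] table in pyproject.toml once installed.
print(md.version("tradingfeatures"))              # expected: 0.8.0
print(md.metadata("tradingfeatures")["Summary"])  # expected: the description string above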
26 changes: 0 additions & 26 deletions setup.py

This file was deleted.

26 changes: 4 additions & 22 deletions tradingfeatures/uber.py
@@ -28,9 +28,6 @@ def __init__(

        self.apis = [self.apis_dict.get(key) for key in api_to_use]

-        self.bitmex = bitmex()
-        # self.google_trends = google_trends()

        self.columns = (
            ["open", "low", "high", "close", "volume"] if columns is None else columns
        )
@@ -53,23 +50,23 @@ def eval_get(self, limit=1000, **kwargs):

        datasets = [[item[0], item[1].result()[-limit:]] for item in futures]

-        merged = self.get(
-            datasets=datasets, save=False, trends=False, date=False, **kwargs
-        )
+        merged = self.get(datasets=datasets, save=False, date=False, **kwargs)

        # Fix for 0 and nan - check here again later
        merged = merged.replace(0, np.nan)
        if merged.isnull().values.any():  # Check here later
            merged = merged.interpolate()

+        # Remove preceding nan data
+        merged = merged.loc[merged.first_valid_index():]

        return merged
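The two cleanup steps above are easiest to see on a toy frame. A minimal, standalone sketch (made-up data, not the library's own objects) of what the replace/interpolate pass and the new first_valid_index() trim do:

import numpy as np
import pandas as pd

# Toy merged frame: the first rows are NaN because one source's history
# starts later than the others, and one bad tick is reported as 0.
merged = pd.DataFrame({"close": [np.nan, np.nan, 100.0, 0.0, 102.0, 103.0]})

# Treat zeros as missing, then fill interior gaps by interpolation.
merged = merged.replace(0, np.nan)
if merged.isnull().values.any():
    merged = merged.interpolate()  # row 3 becomes 101.0; rows 0-1 stay NaN

# Remove preceding NaN data: first_valid_index() is the first index label with
# non-NaN data, so .loc[...:] drops the leading all-NaN rows (0 and 1 here).
merged = merged.loc[merged.first_valid_index():]
print(merged)

After the trim, the frame simply starts at its first non-NaN row, which is what the commit message refers to.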

    def get(
        self,
        path="",
        datasets=None,
        merge=True,
-        trends=False,
        date=True,
        save=True,
        **kwargs,
@@ -139,37 +136,22 @@ def get(
        if date:
            df_final["date"] = pd.to_datetime(df_final.index, unit="s", utc=True)

-        if trends:
-            df_trends = self.google_trends.update("uber_data")
-            df_final = df_final.join(df_trends)
-
-            df_final["google_trends"].replace(0, np.nan, inplace=True)
-            df_final["google_trends"] = (
-                df_final["google_trends"].astype(float).interpolate()
-            )
-
        if save:
            df_final.to_csv(path + "/merged_final.csv")

        return df_final
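The date column kept above is derived from the epoch-second index. A standalone illustration of that conversion (toy timestamps, not the library's data):

import pandas as pd

# Toy frame indexed by Unix timestamps in seconds, as the merged data is.
df = pd.DataFrame({"close": [100.0, 101.0]}, index=[1732838400, 1732842000])

# unit="s" interprets the integer index as seconds since the epoch,
# utc=True makes the resulting datetimes timezone-aware (UTC).
df["date"] = pd.to_datetime(df.index, unit="s", utc=True)
print(df)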

    def update(self, path="uber_data", **kwargs):  # Fix path
        working_directory = os.getcwd()
        datasets = []

        for api in self.apis:
            path_df = path + f"/{api.name}.csv"
-            # df = pd.read_csv(path_df, index_col=0)
-            # datasets.append([api.name, df])
-            # # break

            if os.path.exists(path_df):
                df = api.update(path_df)
            else:
                print(f"Couldn't find {api.name} data, downloading from strach..")
                raise Exception
-                # df = api.get_hist()
-                # df.to_csv(path_df)

            datasets.append([api.name, df])

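The commented-out lines removed above hinted at a download-from-scratch fallback instead of the bare raise Exception. A hedged sketch of what that branch could look like, pulled out as a helper (api.update and api.get_hist are assumed to return DataFrames, as the removed comments suggest; this is not part of the commit):

import os
import pandas as pd

def load_or_fetch(api, path_df: str) -> pd.DataFrame:
    # Reuse the cached CSV if it exists and let the api bring it up to date;
    # otherwise fall back to a full download and cache it for next time.
    if os.path.exists(path_df):
        return api.update(path_df)
    print(f"Couldn't find {path_df}, downloading from scratch..")
    df = api.get_hist()  # assumed: full history as a DataFrame
    df.to_csv(path_df)
    return df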
