While I was looking for a Python API for crypto data, I found twelvedata. It's cheap and seems comprehensive.
But I don't like their official Python client: it returns pandas rather than polars, and it silently caps the number of rows fetched even when a start date is set. So I stopped using their client and wrote a small code fragment myself. I hope it is helpful to anyone considering their service.
Note: I'm just a customer of their online service and am not affiliated with them at all.
import json
import logging
import urllib.parse

import polars as pl
import requests

from data.environ import get_or_die  # project-local helper: return the env var or raise

_BASE_URL = "https://api.twelvedata.com"

logger = logging.getLogger(__name__)


def historical(
    *,
    symbol: str,
    currency: str,
    interval: str = "1day",
    start_date: str = "1990-01-01",
) -> pl.DataFrame:
    """Fetch the time series for a pair such as BTC/USD, oldest rows first."""
    params = {
        "symbol": f"{symbol}/{currency}",
        "interval": interval,
        "start_date": start_date,
        "format": "JSON",
        "order": "asc",
        "apikey": get_or_die("TWELVE_DATA_API_KEY"),
    }
    url = f"{_BASE_URL}/time_series?" + urllib.parse.urlencode(params)
    logger.debug(f"Fetching: {url}")
    data = _fetch(url)
    data = _exclude_today(data)
    # Drop the first row as well, and cast the OHLC columns to floats.
    return _as_polars(data["values"][1:], floats=["close", "high", "low", "open"])


def _exclude_today(data: dict) -> dict:
    # With order=asc the last row is today's still-incomplete candle; drop it.
    data["values"] = data["values"][:-1]
    return data


def _as_polars(
    data: list[dict[str, str]],
    *,
    floats: list[str] | None = None,
    ints: list[str] | None = None,
    datetime_col: str | None = "datetime",
) -> pl.DataFrame:
    # The API returns every field as a string; cast the requested columns.
    df = pl.DataFrame(data)
    conversions: list[pl.Expr] = []
    if datetime_col:
        conversions.append(pl.col(datetime_col).str.strptime(pl.Date, "%Y-%m-%d"))
    for col in floats or []:
        conversions.append(pl.col(col).cast(pl.Float32))
    for col in ints or []:
        conversions.append(pl.col(col).cast(pl.Int32))
    return df.with_columns(conversions)


def _fetch(url: str) -> dict:
    with requests.get(url) as resp:
        if resp.status_code != 200:
            raise ValueError(f"HTTP Status Code: {resp.status_code}")
        d: dict = json.loads(resp.text)
        # Some errors come back as HTTP 200 with an error payload in the body.
        if d.get("status") == "error":
            raise ValueError(d["message"])
        return d
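For completeness, a minimal usage sketch. It assumes the fragment above is saved as twelvedata_client.py (the module name is arbitrary) and that TWELVE_DATA_API_KEY is set in the environment for get_or_die to pick up:

# Hypothetical usage of the fragment above; module name is an assumption.
from twelvedata_client import historical

# Daily BTC/USD candles from 2020 onward, ordered oldest to newest.
df = historical(symbol="BTC", currency="USD", start_date="2020-01-01")
print(df.tail())

The returned DataFrame has a Date-typed datetime column and Float32 close/high/low/open columns, so it drops straight into further polars work.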