"""

BatteRaquette58/airbnb-stock-price (c) by BatteRaquette58



BatteRaquette58/airbnb-stock-price is licensed under a

Creative Commons Attribution-ShareAlike 4.0 International License.



You should have received a copy of the license along with this

work. If not, see <https://creativecommons.org/licenses/by-sa/4.0/>.

"""

from datasets import load_dataset, Dataset
from numpy import array
from datetime import datetime
from time import mktime
from re import match
from pickle import dump
from huggingface_hub import HfApi

# load original datasets
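# note: wrapping each "train" split in numpy's array() yields an object array
# whose elements are plain per-row dicts (column name -> value)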
stock_price = array(load_dataset("nateraw/airbnb-stock-price")["train"])
stock_price_2 = array(load_dataset("nateraw/airbnb-stock-price-2")["train"])
datasets = (stock_price, stock_price_2)

def convert_to_timestamp(date: str) -> float:
    "Converts a date string from either source dataset into a Unix timestamp."

    # the two datasets use different date formats; detect which one applies
    if match(r"\d{2}-\d{2}-\d{4}", date):
        time_struct = datetime.strptime(date, "%m-%d-%Y").timetuple()
    elif match(r"\d{2}/\d{2}/\d{2}", date):
        time_struct = datetime.strptime(date, "%m/%d/%y").timetuple()
    else:
        raise ValueError(f"Invalid date {date}")

    # convert the time struct into seconds since the epoch
    return mktime(time_struct)
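
# Example usage (the exact float depends on the local timezone):
#   convert_to_timestamp("12-10-2020")  # "%m-%d-%Y" branch
#   convert_to_timestamp("12/10/20")    # "%m/%d/%y" branch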

def data_generator():
    "Generator yielding the rows of the merged dataset."

    seen_dates = set() # used to skip duplicate dates across the two datasets

    for stock in datasets:
        for price in stock:
            # the first dataset uses capitalized column names ("Date", "Open", ...);
            # the second is assumed to already use the lowercase merged-schema names
            row = {}
            row["date"] = convert_to_timestamp(price["Date"] if "Date" in price else price["date"])
            if row["date"] in seen_dates: # skip duplicate dates
                continue
            seen_dates.add(row["date"])
            row["open"] = price["Open"] if "Open" in price else price["open"]
            row["close_last"] = price["Adj.Close"] if "Adj.Close" in price else price["close_last"]
            row["volume"] = price["Volume"] if "Volume" in price else price["volume"]
            row["high"] = price["High"] if "High" in price else price["high"]
            row["low"] = price["Low"] if "Low" in price else price["low"]
            yield row
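
# Each merged row has the following shape (values shown are illustrative only):
#   {"date": 1628136000.0, "open": 146.0, "close_last": 144.5,
#    "volume": 4500000, "high": 147.2, "low": 143.8}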

# build the merged Dataset object, export it in several formats, and push it to the Hub
dataset = Dataset.from_generator(data_generator)
dataset.to_csv("airbnb-stock.csv")
dataset.to_parquet("airbnb-stock.parquet")
dataset.to_json("airbnb-stock.json")
with open("airbnb-stock.pickle", "wb") as file:
    dump(dataset.to_dict(), file)
dataset.push_to_hub("BatteRaquette58/airbnb-stock")
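
# The pickled dict can be reloaded later with, e.g.:
#   from pickle import load
#   with open("airbnb-stock.pickle", "rb") as f:
#       stock_dict = load(f)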

# also upload the exported files themselves under data/ in the dataset repo
api = HfApi()
for file in ("airbnb-stock.csv", "airbnb-stock.parquet", "airbnb-stock.json", "airbnb-stock.pickle"):
    api.upload_file(
        path_or_fileobj=file,
        path_in_repo=f"data/{file}",
        repo_id="BatteRaquette58/airbnb-stock-price",
        repo_type="dataset",
    )
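
# The merged dataset can later be loaded back from the Hub with, e.g.:
#   from datasets import load_dataset
#   merged = load_dataset("BatteRaquette58/airbnb-stock")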