gluonts.dataset.artificial package#

class gluonts.dataset.artificial.ArtificialDataset(freq)[source]#

Bases: object

Parent class of a dataset that can be generated from code.

generate() → gluonts.dataset.common.TrainDatasets[source]#
abstract property metadata: gluonts.dataset.common.MetaData#
abstract property test: List[Dict[str, Any]]#
abstract property train: List[Dict[str, Any]]#
class gluonts.dataset.artificial.ComplexSeasonalTimeSeries(num_series: int = 100, prediction_length: int = 20, freq_str: str = 'D', length_low: int = 30, length_high: int = 200, min_val: float = -10000, max_val: float = 10000, is_integer: bool = False, proportion_missing_values: float = 0, is_noise: bool = True, is_scale: bool = True, percentage_unique_timestamps: float = 0.07, is_out_of_bounds_date: bool = False, seasonality: Optional[int] = None, clip_values: bool = False)[source]#

Bases: gluonts.dataset.artificial._base.ArtificialDataset

Generate sinusoidal time series that ramp up to a certain amplitude and level, and that have additional spikes on each Sunday.

TODO: This could be converted to a RecipeDataset to avoid code duplication.

make_timeseries(seed: int = 1) → List[Dict[str, Any]][source]#
property metadata: gluonts.dataset.common.MetaData#
property test: List[Dict[str, Any]]#
property train: List[Dict[str, Any]]#
class gluonts.dataset.artificial.ConstantDataset(num_timeseries: int = 10, num_steps: int = 30, freq: str = '1h', start: str = '2000-01-01 00:00:00', is_nan: bool = False, is_random_constant: bool = False, is_different_scales: bool = False, is_piecewise: bool = False, is_noise: bool = False, is_long: bool = False, is_short: bool = False, is_trend: bool = False, num_missing_middle: int = 0, is_promotions: bool = False, holidays: Optional[List[pandas._libs.tslibs.timestamps.Timestamp]] = None)[source]#

Bases: gluonts.dataset.artificial._base.ArtificialDataset

compute_data_from_recipe(num_steps: int, constant: Optional[float] = None, one_to_zero: float = 0.1, zero_to_one: float = 0.1, scale_features: float = 200) → gluonts.dataset.common.TrainDatasets[source]#
determine_constant(index: int, constant: Optional[float] = None, seed: int = 1) → Optional[float][source]#
generate_ts(num_ts_steps: int, is_train: bool = False) → List[Dict[str, Any]][source]#
get_num_steps(index: int, num_steps_max: int = 10000, long_freq: int = 4, num_steps_min: int = 2, short_freq: int = 4) → int[source]#
insert_missing_vals_middle(ts_len: int, constant: Optional[float]) → List[Optional[float]][source]#
static insert_nans_and_zeros(ts_len: int) → List[source]#
property metadata: gluonts.dataset.common.MetaData#
piecewise_constant(index: int, num_steps: int) → List[source]#
property test: List[Dict[str, Any]]#
property train: List[Dict[str, Any]]#
class gluonts.dataset.artificial.RecipeDataset(recipe: typing.Union[typing.Callable, typing.Dict[str, typing.Callable], typing.List[typing.Tuple[str, typing.Callable]]], metadata: gluonts.dataset.common.MetaData, max_train_length: int, prediction_length: int, num_timeseries: int, trim_length_fun=<function RecipeDataset.<lambda>>, data_start=Timestamp('2014-01-01 00:00:00'))[source]#

Bases: gluonts.dataset.artificial._base.ArtificialDataset

Synthetic data set generated by providing a recipe.

A recipe is either a (non-deterministic) function

f(length: int, global_state: dict) -> dict

or list of (field, function) tuples of the form

(field: str, f(data: dict, length: int, global_state: dict) -> dict)

which is processed sequentially, with data initially set to {}, and each entry updating data[field] to the output of the function call.

dataset_info(train_ds: gluonts.dataset.Dataset, test_ds: gluonts.dataset.Dataset) → gluonts.dataset.artificial._base.DatasetInfo[source]#
generate() → gluonts.dataset.common.TrainDatasets[source]#
property metadata: gluonts.dataset.common.MetaData#
property test#
property train#
static trim_ts_item_end(x: Dict[str, Any], length: int) → Dict[str, Any][source]#

Trim a DataEntry into a training range, by removing the last prediction_length time points from the target and dynamic features.

static trim_ts_item_front(x: Dict[str, Any], length: int) → Dict[str, Any][source]#

Trim a DataEntry into a training range, by removing the first offset_front time points from the target and dynamic features.

gluonts.dataset.artificial.constant_dataset() → Tuple[gluonts.dataset.artificial._base.DatasetInfo, gluonts.dataset.Dataset, gluonts.dataset.Dataset][source]#
gluonts.dataset.artificial.default_synthetic() → Tuple[gluonts.dataset.artificial._base.DatasetInfo, gluonts.dataset.Dataset, gluonts.dataset.Dataset][source]#