{
  "architectures": [
    "Chronos2Model"
  ],
  "chronos_config": {
    "context_length": 8192,
    "input_patch_size": 16,
    "input_patch_stride": 16,
    "max_output_patches": 64,
    "output_patch_size": 16,
    "quantiles": [
      0.01,
      0.05,
      0.1,
      0.2,
      0.3,
      0.4,
      0.5,
      0.6,
      0.7,
      0.8,
      0.9,
      0.95,
      0.99
    ],
    "time_encoding_scale": 8192,
    "use_arcsinh": true,
    "use_reg_token": true
  },
  "chronos_pipeline_class": "Chronos2Pipeline",
  "d_ff": 2048,
  "d_kv": 64,
  "d_model": 512,
  "dense_act_fn": "relu",
  "dropout_rate": 0.1,
  "feed_forward_proj": "relu",
  "initializer_factor": 0.05,
  "is_gated_act": false,
  "layer_norm_epsilon": 1e-06,
  "model_type": "t5",
  "num_heads": 8,
  "num_layers": 6,
  "pad_token_id": 0,
  "reg_token_id": 1,
  "rope_theta": 10000.0,
  "torch_dtype": "float32",
  "transformers_version": "4.49.0",
  "use_cache": true,
  "vocab_size": 2
}
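
For context, a minimal sketch of loading a checkpoint that ships this config via the chronos-forecasting package, whose `BaseChronosPipeline.from_pretrained` dispatches on the `chronos_pipeline_class` field above. The repo id `amazon/chronos-2` is a hypothetical placeholder, and applying the Chronos-Bolt-style `predict_quantiles` call to a Chronos-2 checkpoint is an assumption, not something this file confirms.

```python
# A minimal sketch, assuming the chronos-forecasting package and that
# BaseChronosPipeline dispatches on "chronos_pipeline_class" (here,
# Chronos2Pipeline). The repo id below is a hypothetical placeholder.
import torch
from chronos import BaseChronosPipeline

pipeline = BaseChronosPipeline.from_pretrained(
    "amazon/chronos-2",           # assumption: any repo containing this config
    device_map="cpu",
    torch_dtype=torch.float32,    # matches "torch_dtype" above
)

# One dummy series of length 512; inputs must fit context_length = 8192.
context = torch.randn(1, 512)

# Quantile levels should be drawn from the "quantiles" list in
# chronos_config (0.01 ... 0.99), which the quantile head was trained on.
quantiles, mean = pipeline.predict_quantiles(
    context=context,
    prediction_length=64,         # <= max_output_patches * output_patch_size = 1024
    quantile_levels=[0.1, 0.5, 0.9],
)
print(quantiles.shape)            # (1, 64, 3): (series, horizon, quantile levels)
```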
|
|