在AWS中,有多种方式可以训练DeepAR模型,其中最简单的方法是使用Jupyter Notebook来演示构建和测试模型的过程。这里提供了一个基于Python的示例代码:
import pandas as pd
from datetime import datetime
import numpy as np
import json
import boto3
import sagemaker
# Set up the SageMaker session and the S3 locations used by the job.
session = sagemaker.Session()
s3_bucket = ''  # TODO: fill in your S3 bucket name before running
s3_prefix = 'deep_ar_demo'

# Resolve the region-specific AWS DeepAR training image.
# BUG FIX: image_uris.retrieve() expects the framework identifier
# ("forecasting-deepar"), not a pre-built ECR URI. The previous code built a
# full URI with a hard-coded account id (valid only in some regions) and then
# passed that URI as the framework name, which fails at runtime. Letting
# retrieve() resolve the image handles the per-region registry correctly.
region_name = boto3.Session().region_name
image_name = sagemaker.image_uris.retrieve("forecasting-deepar", region=region_name)

# Create the estimator that will run the DeepAR training job.
estimator = sagemaker.estimator.Estimator(
    image_name,
    role=sagemaker.get_execution_role(),
    instance_count=1,
    instance_type='ml.c4.2xlarge',
    output_path='s3://{}/{}/output'.format(s3_bucket, s3_prefix),
)
# Hyperparameters for the DeepAR training job.
# Adjust these for your specific use-case.
freq = 'H'
prediction_length = 24
context_length = 48
epochs = 10
learning_rate = .001
num_cells = 40
num_layers = 2
dropout_rate = .1
batch_size = 32

# SageMaker requires every hyperparameter value to be a string.
hyperparameters = {"time_freq": freq}
for _hp_name, _hp_value in (
    ("epochs", epochs),
    ("early_stopping_patience", 10),
    ("context_length", context_length),
    ("prediction_length", prediction_length),
    ("learning_rate", learning_rate),
    ("num_cells", num_cells),
    ("num_layers", num_layers),
    ("dropout_rate", dropout_rate),
    ("mini_batch_size", batch_size),
):
    hyperparameters[_hp_name] = str(_hp_value)
# Apply the hyperparameters to the estimator.
estimator.set_hyperparameters(**hyperparameters)

# Point the job at the train/test datasets on S3 and launch training.
data_channels = {
    "train": f"s3://{s3_bucket}/{s3_prefix}/train/",
    "test": f"s3://{s3_bucket}/{s3_prefix}/test/",
}
estimator.fit(inputs=data_channels)