Skip to content

Commit

Permalink
added CDPK to table creation
Browse files Browse the repository at this point in the history
  • Loading branch information
nfunke committed Nov 17, 2023
1 parent dce0241 commit fccdf3d
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 4 deletions.
4 changes: 3 additions & 1 deletion sample_apps/sql/last_value_fill_forward/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,8 @@ python3 ./create_batch_load_task.py \
object_key_prefix=<upload_folder> \
data_file=../last_value_fill_forward/sensor_with_gaps.csv \
database=amazon-timestream-tools \
table=sensordata
table=sensordata \
partition_key=gpio
```

| **⚠ Note**: |
Expand All @@ -38,3 +39,4 @@ Parameter | Description
**data_file** | CSV file for this example | `sensor_with_gaps.csv`
**database** | Database in region. Database will be created if not exists. | `amazon-timestream-tools`
**table** | Table where data is loaded. If this table does not exist, the table will be created | `sensordata`
**partition_key**| Customer-defined partition key (CDPK) used for this data example | `gpio`
27 changes: 24 additions & 3 deletions sample_apps/sql/utils/create_batch_load_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def load_mapping(file_name):
return None
return data

def create_resources(client, region, database_name, table_name, input_bucket_name, input_object_key_prefix, data_file):
def create_resources(client, region, database_name, table_name, input_bucket_name, input_object_key_prefix, data_file, partition_key):

# Upload data file
s3 = boto3.resource('s3')
Expand Down Expand Up @@ -103,10 +103,23 @@ def create_resources(client, region, database_name, table_name, input_bucket_nam
magnetic_store_write_properties = {
'EnableMagneticStoreWrites': True
}

schema = {
"CompositePartitionKey": [
{
"EnforcementInRecord": "REQUIRED",
"Name": partition_key,
"Type": "DIMENSION"
}
]
}

try:
client.create_table(DatabaseName=database_name, TableName=table_name,
RetentionProperties=retention_properties,
MagneticStoreWriteProperties=magnetic_store_write_properties)
MagneticStoreWriteProperties=magnetic_store_write_properties,
Schema=schema
)
print("Table [%s] successfully created." % table_name)
except client.exceptions.ConflictException:
print("Table [%s] exists on database [%s]. Skipping table creation" % (
Expand Down Expand Up @@ -165,7 +178,15 @@ def create_batch_load_task(client, database_name, table_name, input_bucket_name,
print(write_client.meta.config.user_agent)
print(write_client._service_model.operation_names)

success = create_resources(write_client, parameters['region'], parameters['database'], parameters['table'], parameters['input_bucket'], parameters['object_key_prefix'], parameters['data_file'])
success = create_resources(write_client,
parameters['region'],
parameters['database'],
parameters['table'],
parameters['input_bucket'],
parameters['object_key_prefix'],
parameters['data_file'],
parameters['partition_key']
)



Expand Down

0 comments on commit fccdf3d

Please sign in to comment.