1. Create a lifecycle management retention rule (a sketch for applying this policy programmatically follows this list):

```json
{
  "rules": [
    {
      "enabled": true,
      "name": "DeleteArtifactsTemp",
      "type": "Lifecycle",
      "definition": {
        "actions": {
          "baseBlob": { "delete": { "daysAfterCreationGreaterThan": 1 } }
        },
        "filters": {
          "prefixMatch": ["somepath/"],
          "blobTypes": ["blockBlob"],
          "blobIndexMatch": [{ "name": "ttl", "op": "==", "value": "oneDay" }]
        }
      }
    },
    {
      "enabled": true,
      "name": "DeleteArtifactsShort",
      "type": "Lifecycle",
      "definition": {
        "actions": {
          "baseBlob": { "delete": { "daysAfterCreationGreaterThan": 5 } }
        },
        "filters": {
          "prefixMatch": ["somepath/"],
          "blobTypes": ["blockBlob"],
          "blobIndexMatch": [{ "name": "ttl", "op": "==", "value": "fiveDays" }]
        }
      }
    }
  ]
}
```
2. Make sure blob index tags are available on the storage account (general-purpose v2; blob index tags are not supported when the hierarchical namespace is enabled).
3. Set the blob index tag `ttl=oneDay` or `ttl=fiveDays` on each blob during upload. Note that `blobIndexMatch` filters on blob index tags, not metadata, so the value must be set as a tag (see the upload sketch after this list).
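
For step 1, here is a minimal sketch of creating the policy from code instead of the portal, assuming the azure-mgmt-storage and azure-identity packages; the subscription, resource group, and account names are placeholders you will need to adapt:

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient

# Hypothetical values -- substitute your own subscription, resource group, and account.
SUBSCRIPTION_ID = "<subscription-id>"
RESOURCE_GROUP = "<resource-group>"
ACCOUNT_NAME = "<storage-account>"

client = StorageManagementClient(DefaultAzureCredential(), SUBSCRIPTION_ID)

# A storage account has a single management policy, and its name must be "default".
# The dict mirrors the first JSON rule above, using the SDK's snake_case field names;
# append the fiveDays rule to "rules" in the same shape.
client.management_policies.create_or_update(
    RESOURCE_GROUP,
    ACCOUNT_NAME,
    "default",
    {
        "policy": {
            "rules": [
                {
                    "enabled": True,
                    "name": "DeleteArtifactsTemp",
                    "type": "Lifecycle",
                    "definition": {
                        "actions": {
                            "base_blob": {"delete": {"days_after_creation_greater_than": 1}}
                        },
                        "filters": {
                            "prefix_match": ["somepath/"],
                            "blob_types": ["blockBlob"],
                            "blob_index_match": [
                                {"name": "ttl", "op": "==", "value": "oneDay"}
                            ],
                        },
                    },
                }
            ]
        }
    },
)
```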
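
And for step 3, a minimal sketch of tagging blobs with the azure-storage-blob SDK; the connection string, container, and blob names are placeholders. `upload_blob` has accepted a `tags` keyword since version 12.4.0, and `set_blob_tags` can retag an existing blob:

```python
from azure.storage.blob import BlobServiceClient

# Placeholder connection details -- replace with your own values.
service = BlobServiceClient.from_connection_string("<connection-string>")
container = service.get_container_client("<container-name>")

# Lifecycle blobIndexMatch filters evaluate blob index *tags*, so set the
# "ttl" tag (not metadata) when the blob is uploaded.
container.upload_blob(
    "somepath/temp-report.csv",   # hypothetical blob name under the rule's prefix
    data=b"example payload",
    tags={"ttl": "oneDay"},
)

# Retag an existing blob so the five-day rule applies to it instead.
blob = container.get_blob_client("somepath/keep-longer.csv")
blob.set_blob_tags({"ttl": "fiveDays"})
```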
Lifecycle policies only run periodically (roughly once a day), so if you need tighter or immediate enforcement you can do it yourself with the data-plane SDK: store a TTL in seconds in each blob's metadata and periodically delete any blob whose age exceeds it.

```python
import time

from azure.storage.blob import BlobServiceClient

# Placeholder connection details -- replace with your own values.
CONNECTION_STRING = "<your-storage-connection-string>"
CONTAINER_NAME = "<your-container-name>"

blob_service_client = BlobServiceClient.from_connection_string(CONNECTION_STRING)
container_client = blob_service_client.get_container_client(CONTAINER_NAME)

# Store the TTL in seconds as metadata (86400 seconds = one day).
container_client.upload_blob(
    "sampleblob.txt",
    data="data_item",
    metadata={"ttl": "86400"},
    overwrite=True,
)

def check_and_enforce_ttl(container_client):
    """Delete every blob whose age exceeds the TTL (in seconds) in its metadata."""
    current_time = int(time.time())
    # include=["metadata"] returns each blob's metadata with the listing,
    # so no extra get_blob_properties() round trip is needed per blob.
    for blob in container_client.list_blobs(include=["metadata"]):
        ttl = (blob.metadata or {}).get("ttl")
        if ttl is None:
            continue
        delete_time = int(blob.creation_time.timestamp()) + int(ttl)
        if current_time >= delete_time:
            container_client.delete_blob(blob.name)
            print(f"Blob '{blob.name}' has expired and has been deleted.")

check_and_enforce_ttl(container_client)
```

Nothing triggers this function automatically, so run it on a schedule, for example from a cron job or a timer-triggered Azure Function.