Last active
September 6, 2016 15:48
-
-
Save benorama/b8ebe911da703b6ba96d to your computer and use it in GitHub Desktop.
DynamoDB Grails Service
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
package agorapulse.core.dynamo | |
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClient | |
import com.amazonaws.services.dynamodbv2.datamodeling.* | |
import com.amazonaws.services.dynamodbv2.model.* | |
import grails.plugin.awssdk.AmazonWebService | |
import org.apache.log4j.Logger | |
import java.lang.reflect.Field | |
import java.lang.reflect.Method | |
import java.text.ParseException | |
import java.text.SimpleDateFormat | |
/**
 * Abstract base DAO for DynamoDB-mapped domain classes, built on the
 * AWS SDK v1 {@code DynamoDBMapper}. Subclasses supply their annotated
 * domain class and the AWS beans via {@link #init}.
 */
abstract class DynamoDBService {

    static transactional = false

    static final int BATCH_DELETE_LIMIT = 100   // query page size used while bulk-deleting
    static final int DEFAULT_QUERY_LIMIT = 20   // default page size for queries
    static final int DEFAULT_COUNT_LIMIT = 100  // default page size for counts
    static final String SERIALIZED_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"
    static final String SERIALIZED_DATE_TIMEZONE = 'GMT'
    static final int WRITE_BATCH_SIZE = 100 // Max number of elements to write at once in DynamoDB (mixed tables)

    // NOTE(review): SimpleDateFormat is not thread-safe; this shared static instance can
    // corrupt parsed/formatted dates under concurrent use — consider a per-call formatter.
    static protected SimpleDateFormat dateFormatter = new SimpleDateFormat(SERIALIZED_DATE_FORMAT)

    // Set as protected to be accessible from Closures
    protected AmazonDynamoDBClient client     // low-level DynamoDB client
    protected String hashKeyName              // name of the @DynamoDBHashKey property
    protected Class hashKeyClass              // declared type of the hash key
    protected log = Logger.getLogger(DynamoDBService.class)
    protected DynamoDBMapper mapper           // high-level object mapper
    protected Class mainClass                 // mapper-annotated domain class
    protected DynamoDBTable mainTable         // @DynamoDBTable annotation of mainClass
    protected String rangeKeyName             // name of the @DynamoDBRangeKey property (if any)
    protected Class rangeKeyClass             // declared type of the range key
    protected List<String> secondaryIndexes = new ArrayList<String>()  // local secondary index names
/**
 * Initialize service for a given mapper class.
 *
 * Introspects the getters of {@code mainClass} to discover the hash key,
 * the optional range key and any local secondary index names declared
 * through DynamoDB mapper annotations.
 *
 * @param mainClass        domain class annotated with @DynamoDBTable
 * @param amazonWebService Grails AWS SDK plugin bean providing client and mapper
 */
protected void init(Class mainClass,
                    AmazonWebService amazonWebService) {
    if (amazonWebService) { // Ignore when null amazonWebService is passed during Spock tests
        assert amazonWebService?.dynamoDBMapper
        this.client = amazonWebService.dynamoDB
        this.mapper = amazonWebService.dynamoDBMapper
        this.mainClass = mainClass
        this.mainTable = (DynamoDBTable) mainClass.getAnnotation(DynamoDBTable.class)
        dateFormatter.timeZone = TimeZone.getTimeZone(SERIALIZED_DATE_TIMEZONE)
        if (!mainTable) {
            throw new RuntimeException("Missing @DynamoDBTable annotation on class: ${mainClass}")
        }
        // Scan all getters ('getX' / 'isX') for DynamoDB key annotations
        mainClass.getDeclaredMethods().findAll { Method method ->
            method.name.startsWith('get') || method.name.startsWith('is')
        }.each { Method method ->
            // Get hash key
            if (method.getAnnotation(DynamoDBHashKey.class)) {
                hashKeyName = ReflectionUtils.getFieldNameByGetter(method, true)
                hashKeyClass = mainClass.getDeclaredField(hashKeyName).type
            }
            // Get range key
            if (method.getAnnotation(DynamoDBRangeKey.class)) {
                rangeKeyName = ReflectionUtils.getFieldNameByGetter(method, true)
                rangeKeyClass = mainClass.getDeclaredField(rangeKeyName).type
            }
            // Get secondary indexes
            DynamoDBIndexRangeKey indexRangeKeyAnnotation = method.getAnnotation(DynamoDBIndexRangeKey.class)
            if (indexRangeKeyAnnotation) {
                secondaryIndexes.add(indexRangeKeyAnnotation.localSecondaryIndexName())
            }
        }
    }
}
/**
 * Count items matching a hash key and a single range-key condition.
 *
 * Optional settings:
 * - consistentRead (default to false)
 * - limit (default to DEFAULT_COUNT_LIMIT)
 *
 * @param hashKey       hash key value to query
 * @param rangeKeyName  name of the range key attribute
 * @param rangeKeyValue value compared against the range key
 * @param operator      comparison operator (default EQ)
 * @param settings      optional query settings
 * @return number of matching items
 */
int count(hashKey,
          String rangeKeyName,
          rangeKeyValue,
          ComparisonOperator operator = ComparisonOperator.EQ,
          Map settings = [:]) {
    // Fix: buildCondition takes (rangeKeyValue, operator) — the attribute name is the
    // map key, not a condition argument. The extra rangeKeyName argument previously
    // caused a MissingMethodException at runtime (cf. the two-arg calls in query/deleteAll).
    Map conditions = [(rangeKeyName): buildCondition(rangeKeyValue, operator)]
    countByConditions(hashKey, conditions, settings)
}
/**
 * Count items whose date range key falls after/before the given bounds.
 *
 * Optional settings:
 * - consistentRead (default to false)
 * - limit (default to DEFAULT_COUNT_LIMIT)
 *
 * @param hashKey       hash key value to query
 * @param rangeKeyName  name of the date range key attribute
 * @param rangeKeyDates map with 'after' and/or 'before' Date bounds
 * @param settings      optional query settings
 * @return number of matching items
 */
int countByDates(hashKey,
                 String rangeKeyName,
                 Map rangeKeyDates,
                 Map settings = [:]) {
    countByConditions(hashKey, buildDateConditions(rangeKeyName, rangeKeyDates), settings)
}
/**
 * Count items matching a hash key and arbitrary range-key conditions.
 *
 * Optional settings:
 * - consistentRead (default to false)
 * - limit (default to DEFAULT_COUNT_LIMIT)
 *
 * @param hashKey            hash key value to query
 * @param rangeKeyConditions map of attribute name -> Condition
 * @param settings           optional query settings
 * @return number of matching items (0 when nothing matched)
 */
int countByConditions(hashKey,
                      Map<String, Condition> rangeKeyConditions,
                      Map settings = [:]) {
    // Counting only needs keys — skip the hydrating batch get on index queries
    settings.batchGetDisabled = true
    if (!settings.limit) {
        settings.limit = DEFAULT_COUNT_LIMIT
    }
    QueryResultPage resultPage = queryByConditions(hashKey, rangeKeyConditions, settings)
    // Fix: guard 'results' too — 'resultPage?.results.size()' would NPE when the page is null
    resultPage?.results?.size() ?: 0
}
/**
 * Create the DynamoDB table for the given Class, if it does not exist yet.
 *
 * @param classToCreate      Class to create the table for (defaults to mainClass)
 * @param readCapacityUnits  provisioned read throughput, default 10
 * @param writeCapacityUnits provisioned write throughput, default 5
 */
def createTable(Class classToCreate = null,
                Long readCapacityUnits = 10,
                Long writeCapacityUnits = 5) {
    Class targetClass = classToCreate ?: mainClass
    DynamoDBTable table = targetClass.getAnnotation(DynamoDBTable.class)
    try {
        // Existence check: describeTable throws when the table is missing
        client.describeTable(table.tableName())
    } catch (ResourceNotFoundException ignored) {
        // Table is missing: let the mapper derive the schema, then attach throughput
        CreateTableRequest request = mapper.generateCreateTableRequest(targetClass)
        request.setProvisionedThroughput(
            new ProvisionedThroughput()
                .withReadCapacityUnits(readCapacityUnits)
                .withWriteCapacityUnits(writeCapacityUnits)
        )
        log.info("Creating DynamoDB table: ${request}")
        client.createTable(request)
    }
}
/**
 * Decrement a numeric attribute with an atomic ADD of a negative value.
 *
 * @param hashKey            hash key of the item
 * @param rangeKey           range key of the item
 * @param attributeName      attribute to decrement
 * @param attributeIncrement amount to subtract (default 1)
 * @return the new attribute value, or null when unavailable
 */
Integer decrement(hashKey,
                  rangeKey,
                  String attributeName,
                  int attributeIncrement = 1) {
    int delta = -attributeIncrement
    increment(hashKey, rangeKey, attributeName, delta)
}
/**
 * Delete item by IDs.
 *
 * @param hashKey  hash key of the item to delete
 * @param rangeKey range key of the item to delete
 * @param settings settings
 */
void delete(hashKey,
            rangeKey,
            Map settings = [:]) {
    // Build a key-only instance; the mapper only needs the keys for deletion
    def keyOnlyItem = mainClass.newInstance((hashKeyName): hashKey, (rangeKeyName): rangeKey)
    delete(keyOnlyItem, settings)
}
/**
 * Delete a single mapped object.
 *
 * @param item     object to delete
 * @param settings settings
 */
void delete(item,
            Map settings = [:]) {
    List singleton = [item]
    deleteAll(singleton, settings)
}
/**
 * Delete a list of items from DynamoDB.
 *
 * @param itemsToDelete a list of objects to delete
 * @param settings      settings ('batchEnabled' defaults to true)
 */
def deleteAll(List itemsToDelete,
              Map settings = [:]) {
    if (!settings.containsKey('batchEnabled')) {
        settings.batchEnabled = true
    }
    boolean useBatch = settings.batchEnabled && itemsToDelete.size() > 1
    if (useBatch) {
        // Split into chunks to stay within DynamoDB batch-write limits
        for (List chunk in itemsToDelete.collate(WRITE_BATCH_SIZE)) {
            log.debug("Deleting items from DynamoDB ${chunk}")
            mapper.batchDelete(chunk)
        }
    } else {
        itemsToDelete.each { candidate ->
            log.debug("Deleting item from DynamoDB ${candidate}")
            mapper.delete(candidate)
        }
    }
}
/**
 * Delete all items for a given hashKey.
 *
 * @param hashKey  hash key whose items should all be removed
 * @param settings settings
 * @return number of deleted items
 */
int deleteAll(hashKey,
              Map settings = [:]) {
    Map<String, Condition> noConditions = [:]
    deleteAllByConditions(hashKey, noConditions, settings)
}
/**
 * Delete all items matching a hash key and a single range-key condition.
 *
 * @param hashKey       hash key of the items to delete
 * @param rangeKeyName  name of the range key attribute
 * @param rangeKeyValue value compared against the range key
 * @param operator      comparison operator (default BEGINS_WITH)
 * @param settings      settings
 * @return number of deleted items
 */
int deleteAll(hashKey,
              String rangeKeyName,
              rangeKeyValue,
              ComparisonOperator operator = ComparisonOperator.BEGINS_WITH,
              Map settings = [:]) {
    Map conditions = [(rangeKeyName): buildCondition(rangeKeyValue, operator)]
    deleteAllByConditions(
        hashKey,
        conditions,
        settings
    )
}
/**
 * Delete, page by page, every item matching the hash key and range-key conditions.
 *
 * @param hashKey            hash key of the items to delete
 * @param rangeKeyConditions map of attribute name -> Condition (may be empty)
 * @param settings           settings ('limit' defaults to BATCH_DELETE_LIMIT)
 * @param indexName          optional local secondary index to query
 * @return number of deleted items
 */
int deleteAllByConditions(hashKey,
                          Map<String, Condition> rangeKeyConditions,
                          Map settings = [:],
                          String indexName = '') {
    if (!settings.containsKey('batchEnabled')) {
        settings.batchEnabled = true
    }
    if (!settings.limit) {
        settings.limit = BATCH_DELETE_LIMIT
    }
    DynamoDBQueryExpression query = buildQueryExpression(hashKeyName, hashKey, settings)
    query.hashKeyValues = mainClass.newInstance((hashKeyName): hashKey)
    if (rangeKeyConditions) {
        query.rangeKeyConditions = rangeKeyConditions
    }
    if (indexName) {
        query.indexName = indexName
    }
    int deletedItemsCount = 0
    // Fix: the original issued an extra queryPage before the loop whose results were
    // discarded (the first page was fetched twice). Fetch each page exactly once,
    // advancing the cursor via exclusiveStartKey until the table reports no more pages.
    while (true) {
        QueryResultPage itemsPage = mapper.queryPage(mainClass, query)
        if (itemsPage.results) {
            log.debug "Deleting ${itemsPage.results.size()} items, class: ${mainClass}"
            deletedItemsCount += itemsPage.results.size()
            // Delete all items of the current page
            deleteAll(itemsPage.results, settings)
        }
        if (!itemsPage.lastEvaluatedKey) {
            break
        }
        query.exclusiveStartKey = itemsPage.lastEvaluatedKey
    }
    log.debug "Successfully deleted ${deletedItemsCount} items"
    deletedItemsCount
}
/**
 * Load a single item by its keys.
 *
 * @param hashKey  hash key of the item
 * @param rangeKey range key of the item
 * @return the mapped object, or null when not found
 */
def get(hashKey,
        rangeKey) {
    return mapper.load(mainClass, hashKey, rangeKey)
}
/**
 * Retrieve batched items corresponding to a list of item IDs, in the same order.
 * Example: items = twitterItemDBService.getAll(1, [1, 2]).
 *
 * @param hashKey   Hash Key of the items to retrieve
 * @param rangeKeys Range keys of the items to retrieve
 * @param settings  only used for setting throttle/readCapacityUnit when getting large sets
 * @return a list of mapped items ordered like rangeKeys (missing items are skipped)
 */
List getAll(hashKey,
            List rangeKeys,
            Map settings = [:]) {
    Map result = [:]
    // Build key-only instances for the batch load (duplicate range keys removed)
    List objects = rangeKeys.unique().collect { it -> mainClass.newInstance((hashKeyName): hashKey, (rangeKeyName): it) }
    if (settings.throttle) {
        int resultCursor = 0
        long readCapacityUnit = settings.readCapacityUnit
        if (!readCapacityUnit) {
            // No capacity hint given: read the table's provisioned read throughput
            DescribeTableResult tableResult = client.describeTable(mainTable.tableName())
            readCapacityUnit = tableResult?.getTable()?.provisionedThroughput?.readCapacityUnits ?: 10
        }
        // Load in chunks of 20, sleeping once ~80% of the read capacity has been consumed
        objects.collate(20).each { List batchObjects ->
            result += mapper.batchLoad(batchObjects)
            resultCursor++
            if (readCapacityUnit && resultCursor >= (readCapacityUnit * 0.8)) {
                resultCursor = 0
                sleep(1000)
            }
        }
    } else {
        result = mapper.batchLoad(objects)
    }
    // batchLoad returns a map keyed by table name
    if (result[mainTable.tableName()]) {
        List unorderedItems = result[mainTable.tableName()]
        List items = []
        // Build an item list ordered in the same manner as the list of IDs we've been passed
        rangeKeys.each { rangeKey ->
            def matchingItem = unorderedItems.find { item ->
                item[rangeKeyName] == rangeKey
            }
            if (matchingItem) {
                items.add(matchingItem)
            }
            // Remove the matching item from the unordered list to reduce the number of loops in the find above
            unorderedItems.remove(matchingItem)
        }
        items
    } else {
        []
    }
}
/**
 * Increment a numeric attribute with an atomic ADD operation.
 *
 * @param hashKey            hash key of the item
 * @param rangeKey           range key of the item
 * @param attributeName      attribute to increment
 * @param attributeIncrement amount to add (default 1)
 * @return the new attribute value, or null when unavailable
 */
Integer increment(hashKey,
                  rangeKey,
                  String attributeName,
                  int attributeIncrement = 1) {
    UpdateItemResult updateResult = updateItemAttribute(hashKey, rangeKey, attributeName, attributeIncrement, AttributeAction.ADD)
    String updatedNumber = updateResult?.attributes[attributeName]?.getN()
    updatedNumber?.toInteger()
}
/**
 * Query all items for a hash key (no range-key filtering).
 *
 * Optional settings:
 * - batchGetDisabled (only when secondary indexes are used, useful for count when all item attributes are not required)
 * - consistentRead (default to false)
 * - exclusiveStartKey a map with the rangeKey (ex: [id: 2555]), with optional indexRangeKey when using LSI (ex.: [id: 2555, totalCount: 45])
 * - limit
 * - returnAll disable paging to return all items, WARNING: can be expensive in terms of throughput (default to false)
 * - scanIndexForward (default to false)
 *
 * @param hashKey  hash key value to query
 * @param settings optional query settings
 * @return a page of results (or all results when 'returnAll' is set)
 */
QueryResultPage query(hashKey,
                      Map settings = [:]) {
    Map<String, Condition> noConditions = [:]
    queryByConditions(hashKey, noConditions, settings)
}
/**
 * Query items matching a hash key and a single range-key condition.
 *
 * Optional settings:
 * - batchGetDisabled (only when secondary indexes are used, useful for count when all item attributes are not required)
 * - consistentRead (default to false)
 * - exclusiveStartKey a map with the rangeKey (ex: [id: 2555]), with optional indexRangeKey when using LSI (ex.: [id: 2555, totalCount: 45])
 * - limit
 * - returnAll disable paging to return all items, WARNING: can be expensive in terms of throughput (default to false)
 * - scanIndexForward (default to false)
 *
 * @param hashKey       hash key value to query
 * @param rangeKeyName  name of the range key attribute
 * @param rangeKeyValue value compared against the range key
 * @param operator      comparison operator (default EQ)
 * @param settings      optional query settings
 * @return a page of results (or all results when 'returnAll' is set)
 */
QueryResultPage query(hashKey,
                      String rangeKeyName,
                      rangeKeyValue,
                      ComparisonOperator operator = ComparisonOperator.EQ,
                      Map settings = [:]) {
    Condition condition = buildCondition(rangeKeyValue, operator)
    queryByConditions(hashKey, [(rangeKeyName): condition], settings)
}
/**
 * Query a page (or all pages) of items matching the hash key and range-key conditions.
 *
 * Optional settings:
 * - batchGetDisabled (only when secondary indexes are used, useful for count when all item attributes are not required)
 * - consistentRead (default to false)
 * - exclusiveStartKey a map with the rangeKey (ex: [id: 2555]), with optional indexRangeKey when using LSI (ex.: [id: 2555, totalCount: 45])
 * - limit
 * - returnAll disable paging to return all items, WARNING: can be expensive in terms of throughput (default to false)
 * - scanIndexForward (default to false)
 * - throttle insert sleeps during execution to avoid reaching provisioned read throughput (default to false)
 *
 * @param hashKey            hash key value to query
 * @param rangeKeyConditions map of attribute name -> Condition (may be empty)
 * @param settings           optional query settings
 * @param indexName          optional local secondary index to query
 * @return a QueryResultPage with the results
 */
QueryResultPage queryByConditions(hashKey,
                                  Map<String, Condition> rangeKeyConditions,
                                  Map settings = [:],
                                  String indexName = '') {
    DynamoDBQueryExpression query = buildQueryExpression(hashKeyName, hashKey, settings)
    query.hashKeyValues = mainClass.newInstance((hashKeyName): hashKey)
    if (rangeKeyConditions) {
        query.rangeKeyConditions = rangeKeyConditions
    }
    if (indexName) {
        query.indexName = indexName
    }
    long readCapacityUnit = 0
    int resultCursor = 0
    QueryResultPage resultPage = new QueryResultPage()
    if (settings.returnAll) {
        // Get table read Throughput
        if (settings.throttle) {
            DescribeTableResult tableResult = client.describeTable(mainTable.tableName())
            readCapacityUnit = tableResult?.getTable()?.provisionedThroughput?.readCapacityUnits ?: 0
        }
        // Query all pages, following lastEvaluatedKey until exhausted
        resultPage.results = []
        while (true) {
            QueryResultPage currentPage = mapper.queryPage(mainClass, query)
            resultPage.results.addAll(currentPage.results)
            if (settings.throttle) {
                resultCursor++
                if (readCapacityUnit && resultCursor >= (readCapacityUnit * 0.8)) {
                    resultCursor = 0
                    sleep(1000)
                }
            }
            if (!currentPage.lastEvaluatedKey) {
                break
            }
            // Fix: advance the cursor — the original never set exclusiveStartKey, so a
            // multi-page result re-read the first page forever (it also coerced the
            // lastEvaluatedKey map into a String-typed variable).
            query.exclusiveStartKey = currentPage.lastEvaluatedKey
        }
    } else {
        // Query a single page
        resultPage = mapper.queryPage(mainClass, query)
    }
    if (resultPage && (rangeKeyConditions || indexName) && !settings.batchGetDisabled) {
        // Indexes result only provides hash+range attributes, we need to batch get all items
        List rangeKeys = resultPage.results.collect { it[rangeKeyName] }
        if (rangeKeys) {
            resultPage.results = getAll(hashKey, rangeKeys, settings + [readCapacityUnit: readCapacityUnit])
        }
    }
    resultPage
}
/**
 * Query by dates with 'after' and/or 'before' range value
 * 1) After a certain date : [after: new Date()]
 * 2) Before a certain date : [before: new Date()]
 * 3) Between provided dates : [after: new Date() + 1, before: new Date()]
 *
 * Optional settings:
 * - batchGetDisabled (only when secondary indexes are used, useful for count when all item attributes are not required)
 * - consistentRead (default to false)
 * - exclusiveStartKey a map with the rangeKey (ex: [id: 2555]), with optional indexRangeKey when using LSI (ex.: [id: 2555, totalCount: 45])
 * - limit
 * - scanIndexForward (default to false)
 *
 * @param hashKey       hash key value to query
 * @param rangeKeyName  name of the date range key attribute
 * @param rangeKeyDates map with 'after' and/or 'before' Date bounds
 * @param settings      optional query settings
 * @return a page of results
 */
QueryResultPage queryByDates(hashKey,
                             String rangeKeyName,
                             Map rangeKeyDates,
                             Map settings = [:]) {
    queryByConditions(hashKey, buildDateConditions(rangeKeyName, rangeKeyDates), settings)
}
/**
 * Save an item.
 *
 * @param item the item to save
 * @param settings settings
 * @return the Item after it's been saved
 */
def save(item,
         Map settings = [:]) {
    def saved = saveAll([item], settings)
    saved.first()
}
/**
 * Save a list of objects in DynamoDB.
 *
 * @param itemsToSave a list of objects to save
 * @param settings    settings ('batchEnabled' defaults to true; optional 'config' DynamoDBMapperConfig)
 */
def saveAll(List itemsToSave,
            Map settings = [:]) {
    if (!settings.containsKey('batchEnabled')) {
        settings.batchEnabled = true
    }
    // Nullify empty collection properties
    // (presumably because DynamoDB rejects empty sets — TODO confirm against SDK behavior)
    itemsToSave.each { object ->
        object.properties.each { String prop, val ->
            if (object.hasProperty(prop)
                && object[prop] instanceof HashSet
                && object[prop]?.size() == 0) {
                // log.debug("Nullifying collection ${prop} before sending to DynamoDB")
                object[prop] = null
            }
        }
    }
    log.debug "Saving items in DynamoDB ${itemsToSave}"
    if (settings.batchEnabled && itemsToSave.size() > 1) {
        itemsToSave.collate(WRITE_BATCH_SIZE).each { List batchItems ->
            log.debug "Saving batched items in DynamoDB ${batchItems}"
            List failedBatchResult = settings.config ? mapper.batchSave(batchItems, settings.config) : mapper.batchSave(batchItems)
            if (failedBatchResult) {
                // Surface batch failures instead of silently dropping unprocessed items
                failedBatchResult.each { DynamoDBMapper.FailedBatch failedBatch ->
                    log.error "Failed batch with ${failedBatch.unprocessedItems.size()} unprocessed items"
                    log.error "Exception: ${failedBatch.exception}"
                    throw failedBatch.exception
                }
            }
        }
    } else {
        itemsToSave.each {
            log.debug "Saving item in DynamoDB ${it}"
            settings.config ? mapper.save(it, settings.config as DynamoDBMapperConfig) : mapper.save(it)
        }
    }
}
/**
 * Update a single item attribute through the low-level UpdateItem API.
 *
 * @param hashKey        hash key of the item
 * @param rangeKey       range key of the item
 * @param attributeName  attribute to update
 * @param attributeValue new value (or increment amount when action is ADD)
 * @param action         PUT (default), ADD, or DELETE
 * @return the UpdateItemResult carrying the UPDATED_NEW attribute values
 */
UpdateItemResult updateItemAttribute(hashKey,
                                     rangeKey,
                                     String attributeName,
                                     attributeValue,
                                     AttributeAction action = AttributeAction.PUT) {
    UpdateItemRequest request = new UpdateItemRequest(
        tableName: mainTable.tableName(),
        key: [
            (hashKeyName): buildAttributeValue(hashKey),
            (rangeKeyName): buildAttributeValue(rangeKey)
        ],
        // Only return the attributes that were updated, not the whole item
        returnValues: ReturnValue.UPDATED_NEW
    ).addAttributeUpdatesEntry(
        attributeName,
        new AttributeValueUpdate(
            action: action,
            value: buildAttributeValue(attributeValue)
        )
    )
    client.updateItem(request)
}
/**
 * Parse a date serialized with SERIALIZED_DATE_FORMAT in GMT.
 *
 * Uses a fresh formatter per call: the shared static SimpleDateFormat is not
 * thread-safe and could corrupt results under concurrent access. This also makes
 * the GMT timezone unconditional instead of depending on init() having run.
 *
 * @param date serialized date string (ex: 2016-09-06T15:48:00.000Z)
 * @return the parsed Date
 * @throws ParseException when the string does not match the format
 */
static Date deserializeDate(String date) throws ParseException {
    SimpleDateFormat formatter = new SimpleDateFormat(SERIALIZED_DATE_FORMAT)
    formatter.timeZone = TimeZone.getTimeZone(SERIALIZED_DATE_TIMEZONE)
    formatter.parse(date)
}
/**
 * Format a date with SERIALIZED_DATE_FORMAT in GMT.
 *
 * Uses a fresh formatter per call: the shared static SimpleDateFormat is not
 * thread-safe and could corrupt output under concurrent access.
 *
 * @param date the Date to serialize
 * @return the serialized date string (ex: 2016-09-06T15:48:00.000Z)
 */
static String serializeDate(Date date) {
    SimpleDateFormat formatter = new SimpleDateFormat(SERIALIZED_DATE_FORMAT)
    formatter.timeZone = TimeZone.getTimeZone(SERIALIZED_DATE_TIMEZONE)
    formatter.format(date)
}
/**
 * Convert a raw key/value into a DynamoDB AttributeValue.
 *
 * Values whose string form is numeric, and booleans, map to 'N';
 * dates and everything else map to 'S'.
 *
 * @param key raw value to convert
 * @return the corresponding AttributeValue
 */
static protected AttributeValue buildAttributeValue(Object key) {
    String asText = key.toString()
    if (asText.isNumber()) {
        return new AttributeValue().withN(asText)
    }
    if (key instanceof Boolean) {
        return new AttributeValue().withN(key ? "1" : "0")
    }
    if (key instanceof Date) {
        return new AttributeValue().withS(dateFormatter.format(key))
    }
    new AttributeValue().withS(asText)
}
/**
 * Build a single-value Condition for a range-key comparison.
 *
 * @param rangeKeyValue value compared against the range key
 * @param operator      comparison operator (default EQ)
 * @return the Condition
 */
static protected Condition buildCondition(rangeKeyValue,
                                          ComparisonOperator operator = ComparisonOperator.EQ) {
    Condition condition = new Condition()
    condition.comparisonOperator = operator
    condition.attributeValueList = [buildAttributeValue(rangeKeyValue)]
    condition
}
/**
 * Build range-key conditions from 'after'/'before' date bounds.
 *
 * - [after: d]            -> GE d
 * - [before: d]           -> LE d
 * - [after: a, before: b] -> BETWEEN a AND b
 *
 * @param rangeKeyName  name of the date range key attribute
 * @param rangeKeyDates map with 'after' and/or 'before' Date bounds
 * @return single-entry map of attribute name -> Condition
 */
static protected Map buildDateConditions(String rangeKeyName,
                                         Map rangeKeyDates) {
    assert rangeKeyDates.keySet().any { it in ['after', 'before'] }
    boolean hasAfter = rangeKeyDates.containsKey('after')
    boolean hasBefore = rangeKeyDates.containsKey('before')
    ComparisonOperator operator
    List attributeValueList = []
    if (hasAfter) {
        operator = ComparisonOperator.GE
        attributeValueList << new AttributeValue().withS(serializeDate(rangeKeyDates['after']))
    }
    if (hasBefore) {
        operator = ComparisonOperator.LE
        attributeValueList << new AttributeValue().withS(serializeDate(rangeKeyDates['before']))
    }
    if (hasAfter && hasBefore) {
        // BETWEEN expects [lower, upper] — the 'after' bound was appended first
        operator = ComparisonOperator.BETWEEN
    }
    [
        (rangeKeyName): new Condition()
            .withComparisonOperator(operator)
            .withAttributeValueList(attributeValueList)
    ]
}
/**
 * Build a DynamoDBQueryExpression from generic query settings.
 *
 * @param hashKeyName name of the hash key attribute
 * @param hashKey     hash key value (merged into any exclusiveStartKey)
 * @param settings    consistentRead / exclusiveStartKey / limit / scanIndexForward
 * @return the configured query expression
 */
static protected DynamoDBQueryExpression buildQueryExpression(hashKeyName,
                                                              hashKey,
                                                              Map settings = [:]) {
    DynamoDBQueryExpression expression = new DynamoDBQueryExpression()
    if (settings.containsKey('consistentRead')) {
        expression.consistentRead = settings.consistentRead
    }
    if (settings.exclusiveStartKey) {
        assert settings.exclusiveStartKey instanceof Map
        // The start key must also carry the hash key attribute
        expression.exclusiveStartKey = buildStartKey(settings.exclusiveStartKey + [(hashKeyName): hashKey])
    }
    if (settings.limit) {
        assert settings.limit.toString().isNumber()
        expression.limit = settings.limit
    } else {
        expression.limit = DEFAULT_QUERY_LIMIT
    }
    // Default to descending scan order unless explicitly configured
    expression.scanIndexForward = settings.containsKey('scanIndexForward') ? settings.scanIndexForward : false
    expression
}
/**
 * Convert a map of raw key values into a map of AttributeValue entries,
 * usable as a query exclusiveStartKey.
 *
 * @param map raw key attribute values
 * @return map of attribute name -> AttributeValue
 */
static protected Map buildStartKey(Map map) {
    map.collectEntries { key, value ->
        [(key): buildAttributeValue(value)]
    }
}
/**
 * Returns the DynamoDB type for a given Field.
 *
 * Currently handled types :
 * 1) Numbers : short, int, long, float, double and their boxed wrappers
 * 2) String
 * Any other type will result in an exception.
 *
 * @param field Field to determine the type for
 * @return the DynamoDB type associated to the given field ('N' or 'S')
 */
static protected String getDynamoType(Field field) {
    // Generalized: boxed numeric wrappers are stored as numbers too, like their primitives
    List numberTypeNames = [
        'short', 'int', 'long', 'float', 'double',
        'java.lang.Short', 'java.lang.Integer', 'java.lang.Long',
        'java.lang.Float', 'java.lang.Double'
    ]
    if (field.type.name in numberTypeNames) {
        return 'N'
    } else if (field.type.name == 'java.lang.String') {
        return 'S'
    } else {
        throw new RuntimeException("DynamoDB Invalid property type: ${field.type.name}, property: ${field.name}")
    }
}
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/** | |
* Sample user based on DynamoDB java mapper | |
*/ | |
package agorapulse.core.sample; | |
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBAttribute; | |
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBHashKey; | |
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBRangeKey; | |
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBTable; | |
// Maps this bean to the "SampleUser" DynamoDB table
@DynamoDBTable(tableName="SampleUser")
public class SampleUser {

    private long hashKey;   // Hash key
    private long rangeKey;  // Range key
    private String name = "";  // plain attribute, defaults to empty string

    /** DynamoDB hash key. */
    @DynamoDBHashKey
    public long getHashKey() {
        return hashKey;
    }

    public void setHashKey(long hashKey) {
        this.hashKey = hashKey;
    }
    /** DynamoDB range key. */
    @DynamoDBRangeKey
    public long getRangeKey() {
        return rangeKey;
    }

    public void setRangeKey(long rangeKey) {
        this.rangeKey = rangeKey;
    }

    /** Plain persisted attribute. */
    @DynamoDBAttribute
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }
@Override | |
public String toString() { | |
return "SampleUser{" + | |
"hashKey=" + hashKey + | |
", rangeKey=" + rangeKey + | |
'}'; | |
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/** | |
* DynamoDB DAO | |
* Usage example: | |
* // Get user | |
* SampleUser user = sampleUserDBService.get(1, 1) as SampleUser | |
* if (!user) { | |
* user = new SampleUser( | |
* hashKey: 1, | |
* rangeKey: 1, | |
* name: 'foo' | |
* ) | |
*} | |
* | |
* // Save user | |
* sampleUserDBService.save(user) | |
* | |
* // Update user name | |
* sampleUserDBService.updateItemAttribute(1, 1, 'name', 'bar') | |
* | |
* // List users by hash key | |
* sampleUserDBService.query(1) | |
* | |
* // Delete user | |
* sampleUserDBService.delete(user) // or sampleUserDBService.delete(1, 1) | |
**/ | |
package agorapulse.core.sample | |
import agorapulse.core.dynamo.DynamoDBService | |
import grails.plugin.awssdk.AmazonWebService | |
import javax.annotation.PostConstruct | |
/**
 * DynamoDB DAO for SampleUser, wiring the Grails AWS SDK beans into DynamoDBService.
 */
class SampleUserDBService extends DynamoDBService {

    static transactional = false

    // Injected by Spring (grails-aws-sdk plugin bean)
    AmazonWebService amazonWebService

    @PostConstruct
    def init() {
        // Provide the AWS beans to the parent class
        super.init(SampleUser.class, amazonWebService)
    }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment