这是indexloc提供的服务,不要输入任何密码
Skip to content
This repository was archived by the owner on May 17, 2021. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ public class AbstractDynamoDBItemSerializationTest {
* Generic function testing serialization of item state to internal format in DB. In other words, conversion of
* Item with state to DynamoDBItem
*
* @param state item state
* @param state item state
* @param expectedState internal format in DB representing the item state
* @return dynamo db item
* @throws IOException
Expand All @@ -68,8 +68,10 @@ public DynamoDBItem<?> testStateGeneric(State state, Object expectedState) throw
assertEquals("item1", dbItem.getName());
assertEquals(date, dbItem.getTime());
if (expectedState instanceof BigDecimal) {
assertTrue(DynamoDBBigDecimalItem.loseDigits(((BigDecimal) expectedState))
.compareTo((((DynamoDBItem<BigDecimal>) dbItem).getState())) == 0);
BigDecimal expectedDigitsLost = DynamoDBBigDecimalItem.loseDigits(((BigDecimal) expectedState));
BigDecimal actual = ((DynamoDBItem<BigDecimal>) dbItem).getState();
assertTrue(String.format("Expected state %s (%s but with some digits lost) did not match actual state %s",
expectedDigitsLost, expectedState, actual), expectedDigitsLost.compareTo(actual) == 0);
} else {
assertEquals(expectedState, dbItem.getState());
}
Expand All @@ -79,8 +81,8 @@ public DynamoDBItem<?> testStateGeneric(State state, Object expectedState) throw
/**
* Test state deserialization, that is DynamoDBItem conversion to HistoricItem
*
* @param dbItem dynamo db item
* @param item parameter for DynamoDBItem.asHistoricItem
* @param dbItem dynamo db item
* @param item parameter for DynamoDBItem.asHistoricItem
* @param expectedState Expected state of the historic item. DecimalTypes are compared with reduced accuracy
* @return
* @throws IOException
Expand Down Expand Up @@ -181,33 +183,33 @@ public void testPointTypeWithLocationItem() throws IOException {

@Test
public void testDecimalTypeWithNumberItem() throws IOException {
DynamoDBItem<?> dbitem = testStateGeneric(new DecimalType(3.2), new BigDecimal(3.2));
DynamoDBItem<?> dbitem = testStateGeneric(new DecimalType(3.2), new BigDecimal("3.2"));
testAsHistoricGeneric(dbitem, new NumberItem("foo"), new DecimalType(3.2));
}

@Test
public void testPercentTypeWithColorItem() throws IOException {
DynamoDBItem<?> dbitem = testStateGeneric(new PercentType(new BigDecimal(3.2)), new BigDecimal(3.2));
testAsHistoricGeneric(dbitem, new ColorItem("foo"), new PercentType(new BigDecimal(3.2)));
DynamoDBItem<?> dbitem = testStateGeneric(new PercentType(new BigDecimal("3.2")), new BigDecimal("3.2"));
testAsHistoricGeneric(dbitem, new ColorItem("foo"), new PercentType(new BigDecimal("3.2")));
}

@Test
public void testPercentTypeWithDimmerItem() throws IOException {
DynamoDBItem<?> dbitem = testStateGeneric(new PercentType(new BigDecimal(3.2)), new BigDecimal(3.2));
testAsHistoricGeneric(dbitem, new DimmerItem("foo"), new PercentType(new BigDecimal(3.2)));
DynamoDBItem<?> dbitem = testStateGeneric(new PercentType(new BigDecimal("3.2")), new BigDecimal("3.2"));
testAsHistoricGeneric(dbitem, new DimmerItem("foo"), new PercentType(new BigDecimal("3.2")));
}

@Test
public void testPercentTypeWithRollerShutterItem() throws IOException {
DynamoDBItem<?> dbitem = testStateGeneric(new PercentType(new BigDecimal(3.2)), new BigDecimal(3.2));
testAsHistoricGeneric(dbitem, new RollershutterItem("foo"), new PercentType(new BigDecimal(3.2)));
DynamoDBItem<?> dbitem = testStateGeneric(new PercentType(new BigDecimal("3.2")), new BigDecimal("3.2"));
testAsHistoricGeneric(dbitem, new RollershutterItem("foo"), new PercentType(new BigDecimal("3.2")));
}

@Test
public void testPercentTypeWithNumberItem() throws IOException {
DynamoDBItem<?> dbitem = testStateGeneric(new PercentType(new BigDecimal(3.2)), new BigDecimal(3.2));
DynamoDBItem<?> dbitem = testStateGeneric(new PercentType(new BigDecimal("3.2")), new BigDecimal("3.2"));
// note: comes back as DecimalType instead of the original PercentType
testAsHistoricGeneric(dbitem, new NumberItem("foo"), new DecimalType(new BigDecimal(3.2)));
testAsHistoricGeneric(dbitem, new NumberItem("foo"), new DecimalType(new BigDecimal("3.2")));
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
import com.amazonaws.services.dynamodbv2.model.ResourceNotFoundException;

/**
*
*
* @author Sami Salonen
*
*/
Expand Down Expand Up @@ -112,6 +112,9 @@ public void addItemRegistryChangeListener(ItemRegistryChangeListener listener) {
config.put("secretKey", System.getProperty("DYNAMODBTEST_SECRET"));
config.put("tablePrefix", "dynamodb-integration-tests-");

// Disable buffering
config.put("bufferSize", "0");

for (Entry<String, Object> entry : config.entrySet()) {
if (entry.getValue() == null) {
logger.warn(String.format(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,8 @@ public void testRegionWithAccessKeys() throws Exception {
assertEquals(true, fromConfig.isCreateTable());
assertEquals(1, fromConfig.getReadCapacityUnits());
assertEquals(1, fromConfig.getWriteCapacityUnits());
assertEquals(1000L, fromConfig.getBufferCommitIntervalMillis());
assertEquals(1000, fromConfig.getBufferSize());
}

@Test
Expand All @@ -72,6 +74,8 @@ public void testRegionWithProfilesConfigFile() throws Exception {
assertEquals(true, fromConfig.isCreateTable());
assertEquals(1, fromConfig.getReadCapacityUnits());
assertEquals(1, fromConfig.getWriteCapacityUnits());
assertEquals(1000L, fromConfig.getBufferCommitIntervalMillis());
assertEquals(1000, fromConfig.getBufferSize());
}

@Test
Expand Down Expand Up @@ -115,6 +119,8 @@ public void testRegionWithAccessKeysWithPrefix() throws Exception {
assertEquals(true, fromConfig.isCreateTable());
assertEquals(1, fromConfig.getReadCapacityUnits());
assertEquals(1, fromConfig.getWriteCapacityUnits());
assertEquals(1000L, fromConfig.getBufferCommitIntervalMillis());
assertEquals(1000, fromConfig.getBufferSize());
}

@Test
Expand All @@ -128,6 +134,8 @@ public void testRegionWithAccessKeysWithPrefixWithCreateTable() throws Exception
assertEquals(false, fromConfig.isCreateTable());
assertEquals(1, fromConfig.getReadCapacityUnits());
assertEquals(1, fromConfig.getWriteCapacityUnits());
assertEquals(1000L, fromConfig.getBufferCommitIntervalMillis());
assertEquals(1000, fromConfig.getBufferSize());
}

@Test
Expand All @@ -141,6 +149,8 @@ public void testRegionWithAccessKeysWithPrefixWithReadCapacityUnits() throws Exc
assertEquals(true, fromConfig.isCreateTable());
assertEquals(5, fromConfig.getReadCapacityUnits());
assertEquals(1, fromConfig.getWriteCapacityUnits());
assertEquals(1000L, fromConfig.getBufferCommitIntervalMillis());
assertEquals(1000, fromConfig.getBufferSize());
}

@Test
Expand All @@ -154,6 +164,8 @@ public void testRegionWithAccessKeysWithPrefixWithWriteCapacityUnits() throws Ex
assertEquals(true, fromConfig.isCreateTable());
assertEquals(1, fromConfig.getReadCapacityUnits());
assertEquals(5, fromConfig.getWriteCapacityUnits());
assertEquals(1000L, fromConfig.getBufferCommitIntervalMillis());
assertEquals(1000, fromConfig.getBufferSize());
}

@Test
Expand All @@ -167,5 +179,24 @@ public void testRegionWithAccessKeysWithPrefixWithReadWriteCapacityUnits() throw
assertEquals(true, fromConfig.isCreateTable());
assertEquals(3, fromConfig.getReadCapacityUnits());
assertEquals(5, fromConfig.getWriteCapacityUnits());
assertEquals(1000L, fromConfig.getBufferCommitIntervalMillis());
assertEquals(1000, fromConfig.getBufferSize());
}

@Test
public void testRegionWithAccessKeysWithPrefixWithReadWriteCapacityUnitsWithBufferSettings() throws Exception {
DynamoDBConfig fromConfig = DynamoDBConfig.fromConfig(
ImmutableMap.<String, Object> builder().put("region", "eu-west-1").put("accessKey", "access1")
.put("secretKey", "secret1").put("readCapacityUnits", "3").put("writeCapacityUnits", "5")
.put("bufferCommitIntervalMillis", "501").put("bufferSize", "112").build());
assertEquals(Regions.EU_WEST_1, fromConfig.getRegion());
assertEquals("access1", fromConfig.getCredentials().getAWSAccessKeyId());
assertEquals("secret1", fromConfig.getCredentials().getAWSSecretKey());
assertEquals("openhab-", fromConfig.getTablePrefix());
assertEquals(true, fromConfig.isCreateTable());
assertEquals(3, fromConfig.getReadCapacityUnits());
assertEquals(5, fromConfig.getWriteCapacityUnits());
assertEquals(501L, fromConfig.getBufferCommitIntervalMillis());
assertEquals(112, fromConfig.getBufferSize());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,24 @@
<advanced>true</advanced>
</parameter>

<parameter name="bufferCommitIntervalMillis" type="integer" min="0">
<label>Buffer commit interval (ms)</label>
<description><![CDATA[Interval to commit (write) buffered data. In milliseconds.
<br />
<br />Use zero to commit buffered data only when the buffer gets full (not recommended).]]></description>
<default>1000</default>
<advanced>true</advanced>
</parameter>

<parameter name="bufferSize" type="integer" min="0">
<label>Buffer size</label>
<description><![CDATA[Internal buffer size which is used to batch writes to DynamoDB every bufferCommitIntervalMillis.
<br />
<br />Use zero to disable buffering, and write data immediately to DynamoDB. Please note this might have an adverse impact on openHAB performance.]]></description>
<default>1000</default>
<advanced>true</advanced>
</parameter>

</config-description>

</config-description:config-descriptions>
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ Bundle-Version: 1.14.0.qualifier
Bundle-Vendor: openHAB.org
Bundle-RequiredExecutionEnvironment: JavaSE-1.7
DynamicImport-Package: *
Import-Package: com.google.common.collect;version="10.0.1",
Import-Package: com.google.common.collect,
org.apache.commons.lang,
org.openhab.core.items,
org.openhab.core.library.items,
Expand Down
46 changes: 30 additions & 16 deletions bundles/persistence/org.openhab.persistence.dynamodb/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -55,21 +55,21 @@ This service can be configured in the file `services/dynamodb.cfg`.

### Basic configuration

| Property | Default | Required | Description |
|----------|---------|:--------:|-------------|
| accessKey | | Yes | access key as shown in [Setting up Amazon account](#setting-up-amazon-account). |
| secretKey | | Yes | secret key as shown in [Setting up Amazon account](#setting-up-amazon-account). |
| region | | Yes | AWS region ID as described in [Setting up Amazon account](#setting-up-amazon-account). The region needs to match the region that was used to create the user. |
| Property | Default | Required | Description |
| --------- | ------- | :------: | ------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| accessKey | | Yes | access key as shown in [Setting up Amazon account](#setting-up-amazon-account). |
| secretKey | | Yes | secret key as shown in [Setting up Amazon account](#setting-up-amazon-account). |
| region | | Yes | AWS region ID as described in [Setting up Amazon account](#setting-up-amazon-account). The region needs to match the region that was used to create the user. |

### Configuration Using Credentials File

Alternatively, instead of specifying `accessKey` and `secretKey`, one can configure a configuration profile file.

| Property | Default | Required | Description |
|----------|---------|:--------:|-------------|
| profilesConfigFile | | Yes | path to the credentials file. For example, `/etc/openhab2/aws_creds`. Please note that the user that runs openHAB must have approriate read rights to the credential file. For more details on the Amazon credential file format, see [Amazon documentation](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html). |
| profile | | Yes | name of the profile to use |
| region | | Yes | AWS region ID as described in Step 2 in [Setting up Amazon account](#setting-up-amazon-account). The region needs to match the region that was used to create the user. |
| Property | Default | Required | Description |
| ------------------ | ------- | :------: | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| profilesConfigFile |         | Yes      | path to the credentials file. For example, `/etc/openhab2/aws_creds`. Please note that the user that runs openHAB must have appropriate read rights to the credential file. For more details on the Amazon credential file format, see [Amazon documentation](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html). |
| profile | | Yes | name of the profile to use |
| region | | Yes | AWS region ID as described in Step 2 in [Setting up Amazon account](#setting-up-amazon-account). The region needs to match the region that was used to create the user. |

Example of service configuration file (`services/dynamodb.cfg`):

Expand All @@ -91,11 +91,15 @@ aws_secret_access_key=testSecretKey

In addition to the configuration properties above, the following are also available:

| Property | Default | Required | Description |
|----------|---------|:--------:|-------------|
| readCapacityUnits | 1 | No | read capacity for the created tables |
| writeCapacityUnits | 1 | No | write capacity for the created tables |
| tablePrefix | `openhab-` | No | table prefix used in the name of created tables |
| Property | Default | Required | Description |
| -------------------------- | ---------- | :------: | -------------------------------------------------------------------------------------------------- |
| readCapacityUnits | 1 | No | read capacity for the created tables |
| writeCapacityUnits | 1 | No | write capacity for the created tables |
| tablePrefix | `openhab-` | No | table prefix used in the name of created tables |
| bufferCommitIntervalMillis | 1000 | No | Interval to commit (write) buffered data. In milliseconds. |
| bufferSize | 1000 | No | Internal buffer size which is used to batch writes to DynamoDB every `bufferCommitIntervalMillis`. |

Typically you should not need to modify parameters related to buffering.

Refer to Amazon documentation on [provisioned throughput](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.ProvisionedThroughput.html) for details on read/write capacity.

Expand All @@ -105,10 +109,20 @@ All item- and event-related configuration is done in the file `persistence/dynam

### Tables Creation

When an item is persisted via this service, a table is created (if necessary). Currently, the service will create at most two tables for different item types. The tables will be named `<prefix><item-type>`, where `<prefix>` matches the `tablePrefix` configuration property; while the `<item-type>` is either `bigdecimal` (numeric items) or `string` (string and complex items).
When an item is persisted via this service, a table is created (if necessary). Currently, the service will create at most two tables for different item types. The tables will be named `<tablePrefix><item-type>`, where the `<item-type>` is either `bigdecimal` (numeric items) or `string` (string and complex items).

Each table will have three columns: `itemname` (item name), `timeutc` (in ISO 8601 format with millisecond accuracy), and `itemstate` (either a number or string representing item state).

## Buffering

By default, the service is asynchronous which means that data is not written immediately to DynamoDB but instead buffered in-memory.
The size of the buffer, in terms of datapoints, can be configured with `bufferSize`.
Every `bufferCommitIntervalMillis` the whole buffer of data is flushed to DynamoDB.

It is recommended to keep buffering enabled, since the synchronous behaviour (writing data immediately) might have an adverse impact on the whole system when there are many items persisted at the same time. Buffering can be disabled by setting `bufferSize` to zero.

The defaults should be suitable in many use cases.

### Caveats

When the tables are created, the read/write capacity is configured according to configuration. However, the service does not modify the capacity of existing tables. As a workaround, you can modify the read/write capacity of existing tables using the [Amazon console](https://aws.amazon.com/console/).
Expand Down
Loading