diff --git a/googlebigtable/pom.xml b/googlebigtable/pom.xml index 2d7c1aaa4d85c203ad47c8114e63c9bf87d9096c..20c4c71a3de13610cfd1cd44d49392833612c38b 100644 --- a/googlebigtable/pom.xml +++ b/googlebigtable/pom.xml @@ -32,16 +32,10 @@ LICENSE file. <dependencies> <dependency> <groupId>com.google.cloud.bigtable</groupId> - <artifactId>bigtable-hbase-1.0</artifactId> + <artifactId>bigtable-hbase-1.x</artifactId> <version>${googlebigtable.version}</version> </dependency> - <dependency> - <groupId>io.netty</groupId> - <artifactId>netty-tcnative-boringssl-static</artifactId> - <version>1.1.33.Fork26</version> - </dependency> - <dependency> <groupId>com.yahoo.ycsb</groupId> <artifactId>core</artifactId> diff --git a/googlebigtable/src/main/java/com/yahoo/ycsb/db/GoogleBigtableClient.java b/googlebigtable/src/main/java/com/yahoo/ycsb/db/GoogleBigtableClient.java index 8e0c7b0053bd3fa9b209fca2b4dd3aa92690c587..957724d5953cfbae8fe990516e55941f06890072 100644 --- a/googlebigtable/src/main/java/com/yahoo/ycsb/db/GoogleBigtableClient.java +++ b/googlebigtable/src/main/java/com/yahoo/ycsb/db/GoogleBigtableClient.java @@ -34,7 +34,6 @@ import java.util.Set; import java.util.Vector; import java.util.concurrent.ExecutionException; -import com.google.bigtable.repackaged.com.google.protobuf.ByteString; import com.google.bigtable.v2.Column; import com.google.bigtable.v2.Family; import com.google.bigtable.v2.MutateRowRequest; @@ -51,10 +50,10 @@ import com.google.cloud.bigtable.config.BigtableOptions; import com.google.cloud.bigtable.grpc.BigtableDataClient; import com.google.cloud.bigtable.grpc.BigtableSession; import com.google.cloud.bigtable.grpc.BigtableTableName; -import com.google.cloud.bigtable.grpc.async.AsyncExecutor; import com.google.cloud.bigtable.grpc.async.BulkMutation; import com.google.cloud.bigtable.hbase.BigtableOptionsFactory; import com.google.cloud.bigtable.util.ByteStringer; +import com.google.protobuf.ByteString; import com.yahoo.ycsb.ByteArrayByteIterator; 
import com.yahoo.ycsb.ByteIterator; import com.yahoo.ycsb.DBException; @@ -89,10 +88,9 @@ public class GoogleBigtableClient extends com.yahoo.ycsb.DB { private static BigtableOptions options; private static BigtableSession session; - /** Thread loacal Bigtable native API objects. */ + /** Thread local Bigtable native API objects. */ private BigtableDataClient client; - private AsyncExecutor asyncExecutor; - + /** The column family use for the workload. */ private byte[] columnFamilyBytes; @@ -128,8 +126,8 @@ public class GoogleBigtableClient extends com.yahoo.ycsb.DB { CONFIG.set((String)entry.getKey(), (String)entry.getValue()); } - clientSideBuffering = getProperties().getProperty(CLIENT_SIDE_BUFFERING, "false") - .equals("true") ? true : false; + clientSideBuffering = getProperties() + .getProperty(CLIENT_SIDE_BUFFERING, "false").equals("true"); System.err.println("Running Google Bigtable with Proto API" + (clientSideBuffering ? " and client side buffering." : ".")); @@ -150,10 +148,6 @@ public class GoogleBigtableClient extends com.yahoo.ycsb.DB { } else { client = session.getDataClient(); } - - if (clientSideBuffering) { - asyncExecutor = session.createAsyncExecutor(); - } } if ((getProperties().getProperty("debug") != null) @@ -174,17 +168,14 @@ public class GoogleBigtableClient extends com.yahoo.ycsb.DB { if (bulkMutation != null) { try { bulkMutation.flush(); - } catch(RuntimeException e){ + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); throw new DBException(e); - } - } - if (asyncExecutor != null) { - try { - asyncExecutor.flush(); - } catch (IOException e) { + } catch(RuntimeException e){ throw new DBException(e); } } + synchronized (CONFIG) { --threadCount; if (threadCount <= 0) { @@ -446,9 +437,14 @@ public class GoogleBigtableClient extends com.yahoo.ycsb.DB { .getBytes(); synchronized(this) { if (bulkMutation != null) { - bulkMutation.flush(); + try { + bulkMutation.flush(); + } catch (InterruptedException e) { + 
Thread.currentThread().interrupt(); + throw new RuntimeException(e); + } } - bulkMutation = session.createBulkMutation(tableName, asyncExecutor); + bulkMutation = session.createBulkMutation(tableName); } } } diff --git a/hbase10/README.md b/hbase10/README.md index 611d3a6f821579a74a50b231b236049aea90fcb1..8fdc7e58ad910aba9096647e5d14494c30adc49c 100644 --- a/hbase10/README.md +++ b/hbase10/README.md @@ -49,19 +49,20 @@ hbase(main):002:0> create 'usertable', 'cf', {SPLITS => (1..n_splits).map {|i| " Make a note of the column family, in this example it's `cf``. -### 4. Fetch the Proper ALPN Boot Jar +### 4. Download the Bigtable Client Jar with required dependencies: -The Bigtable protocol uses HTTP/2 which requires an ALPN protocol negotiation implementation. On JVM instantiation the implementation must be loaded before attempting to connect to the cluster. If you're using Java 7 or 8, use this [Jetty Version Table](http://www.eclipse.org/jetty/documentation/current/alpn-chapter.html#alpn-versions) to determine the version appropriate for your JVM. (ALPN is included in JDK 9+). Download the proper jar from [Maven](http://search.maven.org/#search%7Cgav%7C1%7Cg%3A%22org.mortbay.jetty.alpn%22%20AND%20a%3A%22alpn-boot%22) somewhere on your system. - -### 5. Download the Bigtable Client Jar +``` +mvn -N dependency:copy -Dartifact=com.google.cloud.bigtable:bigtable-hbase-1.x-hadoop:1.0.0 -DoutputDirectory=target/bigtable-deps +mvn -N dependency:copy -Dartifact=io.dropwizard.metrics:metrics-core:3.1.2 -DoutputDirectory=target/bigtable-deps +``` -Download one of the `bigtable-hbase-1.#` jars from [Maven](http://search.maven.org/#search%7Cga%7C1%7Ccom.google.cloud.bigtable) to your host. +Download the latest `bigtable-hbase-1.x-hadoop` jar from [Maven](http://search.maven.org/#search%7Cgav%7C1%7Cg%3A%22com.google.cloud.bigtable%22%20AND%20a%3A%22bigtable-hbase-1.x-hadoop%22) to your host. -### 6. Download JSON Credentials +### 5. 
Download JSON Credentials Follow these instructions for [Generating a JSON key](https://cloud.google.com/bigtable/docs/installing-hbase-shell#service-account) and save it to your host. -### 7. Create or Edit hbase-site.xml +### 6. Create or Edit hbase-site.xml If you have an existing HBase configuration directory with an `hbase-site.xml` file, edit the file as per below. If not, create a directory called `conf` under the `hbase10` directory. Create a file in the conf directory named `hbase-site.xml`. Provide the following settings in the XML file, making sure to replace the bracketed examples with the proper values from your Cloud console. @@ -69,19 +70,15 @@ If you have an existing HBase configuration directory with an `hbase-site.xml` f <configuration> <property> <name>hbase.client.connection.impl</name> - <value>com.google.cloud.bigtable.hbase1_0.BigtableConnection</value> - </property> - <property> - <name>google.bigtable.cluster.name</name> - <value>[YOUR-CLUSTER-ID]</value> + <value>com.google.cloud.bigtable.hbase1_x.BigtableConnection</value> </property> <property> <name>google.bigtable.project.id</name> <value>[YOUR-PROJECT-ID]</value> </property> <property> - <name>google.bigtable.zone.name</name> - <value>[YOUR-ZONE-NAME]</value> + <name>google.bigtable.instance.id</name> + <value>[YOUR-INSTANCE-ID]</value> </property> <property> <name>google.bigtable.auth.service.account.enable</name> @@ -94,22 +91,20 @@ If you have an existing HBase configuration directory with an `hbase-site.xml` f </configuration> ``` -If you wish to try other API implementations (1.1.x or 1.2.x) change the `hbase.client.connection.impl` appropriately to match the JAR you downloaded. - If you have an existing HBase config directory, make sure to add it to the class path via `-cp <PATH_TO_BIGTABLE_JAR>:<CONF_DIR>`. -### 8. Execute a Workload +### 7. Execute a Workload Switch to the root of the YCSB repo and choose the workload you want to run and `load` it first. 
With the CLI you must provide the column family, cluster properties and the path to the dependency jars downloaded above. ``` -bin/ycsb load hbase10 -p columnfamily=cf -cp <PATH_TO_BIGTABLE_JAR> -jvm-args='-Xbootclasspath/p:<PATH_TO_ALPN_JAR>' -P workloads/workloada +bin/ycsb load hbase10 -p columnfamily=cf -cp 'target/bigtable-deps/*' -P workloads/workloada ``` The `load` step only executes inserts into the datastore. After loading data, run the same workload to mix reads with writes. ``` -bin/ycsb run hbase10 -p columnfamily=cf -jvm-args='-Xbootclasspath/p:<PATH_TO_ALPN_JAR>' -P workloads/workloada +bin/ycsb run hbase10 -p columnfamily=cf -cp 'target/bigtable-deps/*' -P workloads/workloada ``` diff --git a/pom.xml b/pom.xml index 2d730d4deb16536f8ef6b5695024d16f22aa5a3d..af13d1b156342eceb224b3351ea3bd738b71e808 100644 --- a/pom.xml +++ b/pom.xml @@ -85,7 +85,7 @@ LICENSE file. <couchbase2.version>2.3.1</couchbase2.version> <elasticsearch5-version>5.5.1</elasticsearch5-version> <geode.version>1.2.0</geode.version> - <googlebigtable.version>0.9.7</googlebigtable.version> + <googlebigtable.version>1.0.0</googlebigtable.version> <hbase098.version>0.98.14-hadoop2</hbase098.version> <hbase10.version>1.0.2</hbase10.version> <hbase12.version>1.2.5</hbase12.version>