JS 与 trick 代码的运用及解析全攻略
657
2022-11-13
HBase client API 例子集合-1
// NOTE(review): this example is truncated — it stops mid-statement at "List". The generic
// type parameter (presumably "<Row>", given the Row import) and the remainder of main()
// (likely building a list of Put/Get/Delete ops and calling table.batch(...)) were lost,
// apparently when angle-bracketed text was stripped as HTML. TODO: recover the full
// original source before use; this line does not compile as-is.
import java.util.ArrayList;import java.util.List;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.Delete;import org.apache.hadoop.hbase.client.Get;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Put;import org.apache.hadoop.hbase.client.Row;import org.apache.hadoop.hbase.util.Bytes;public class BatchExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); List
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Demonstrates an atomic check-and-delete: "row6" is deleted only if
 * family1:column11 currently holds "value8". Prints whether the delete
 * was applied.
 */
public class CheckAndDeleteExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");
        try {
            Delete delete = new Delete(Bytes.toBytes("row6"));
            // The delete happens server-side only when the checked cell matches.
            boolean ret = table.checkAndDelete(Bytes.toBytes("row6"),
                    Bytes.toBytes("family1"), Bytes.toBytes("column11"),
                    Bytes.toBytes("value8"), delete);
            System.out.println(ret);
        } finally {
            // fix: close in finally so the connection is released even on errors
            table.close();
        }
    }
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Demonstrates an atomic check-and-put: the put is applied only if
 * family1:column6 of "row6" does not exist (a null expected value means
 * "apply only when the checked cell is absent").
 */
public class CheckAndSetExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");
        try {
            Put put1 = new Put(Bytes.toBytes("row6"));
            put1.add(Bytes.toBytes("family1"), Bytes.toBytes("column11"),
                    Bytes.toBytes("value8"));
            table.checkAndPut(Bytes.toBytes("row6"), Bytes.toBytes("family1"),
                    Bytes.toBytes("column6"), null, put1);
        } finally {
            // fix: the original leaked the HTable handle
            table.close();
        }
    }
}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.Get;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class ColumnCountFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Get get=new Get(Bytes.toBytes("row6")); ColumnCountGetFilter filter=new ColumnCountGetFilter(2); get.setFilter(filter); Result result=table.get(get); System.out.println(result); }}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.KeyValue;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.ColumnPaginationFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class ColumnPageFilterExample { /** * @param args */ public static void main(String[] args)throws Exception { Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); ColumnPaginationFilter filter=new ColumnPaginationFilter(1, 2); scan.setFilter(filter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ for(KeyValue kv:result.raw()){ System.out.println(kv+"-----"+Bytes.toString(kv.getValue())); } } resultScanner.close(); }}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.KeyValue;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class ColumnPrefixFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); ColumnPrefixFilter filter=new ColumnPrefixFilter(Bytes.toBytes("column6")); scan.setFilter(filter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ System.out.println(result); } resultScanner.close(); }}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Demonstrates deleting a single cell version: removes the version of
 * family1:column2 of "row1" with timestamp 1.
 */
public class DeleteExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");
        try {
            Delete delete = new Delete(Bytes.toBytes("row1"));
            // Deletes exactly the cell version at timestamp 1 (not all versions).
            delete.deleteColumn(Bytes.toBytes("family1"), Bytes.toBytes("column2"), 1);
            table.delete(delete);
        } finally {
            // fix: close in finally so the handle is released even on errors
            table.close();
        }
    }
}
// NOTE(review): this example is truncated — it stops mid-statement at "List". The generic
// parameter (presumably "<Delete>") and the rest of main() (likely building a list of
// Delete ops and calling table.delete(list)) were lost, apparently when angle-bracketed
// text was stripped as HTML. TODO: recover the full original source; does not compile as-is.
import java.util.ArrayList;import java.util.List;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.Delete;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.util.Bytes;public class DeleteListExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); List
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.KeyValue;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;import org.apache.hadoop.hbase.filter.ByteArrayComparable;import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;import org.apache.hadoop.hbase.filter.DependentColumnFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class DependentColumnFilterExample { public static void filter(boolean drop,CompareOp op,ByteArrayComparable comparable)throws Exception{ Configuration conf= HBaseConfiguration.create(); HTable hbasetalbe=new HTable(conf,"testtable"); DependentColumnFilter filter ; if(comparable==null){ filter=new DependentColumnFilter(Bytes.toBytes("family1"), Bytes.toBytes("column6"), drop); }else{ filter=new DependentColumnFilter(Bytes.toBytes("family1"), Bytes.toBytes("column6"), drop,op,comparable); } Scan scan=new Scan(); scan.setFilter(filter); ResultScanner resultScanner=hbasetalbe.getScanner(scan); for(Result result:resultScanner){ for(KeyValue kv:result.raw()){ System.out.println(kv+"-------"+Bytes.toString(kv.getValue())); } } resultScanner.close(); System.out.println("-----------------"); System.out.println("@@@@@@@@@@@@@@@@@@@@@@@"); } /** * @param args */ public static void main(String[] args)throws Exception { filter(false,null,null); filter(true,null,null); filter(false,CompareOp.EQUAL,new BinaryPrefixComparator(Bytes.toBytes("value6"))); filter(true,CompareOp.EQUAL,new BinaryPrefixComparator(Bytes.toBytes("value6"))); }}
// NOTE(review): this example is truncated — it stops mid-statement at "List". The generic
// parameter (presumably "<Put>") and the rest of main() (judging by the name, a batch of
// puts where one is deliberately bad, to demonstrate error handling) were lost, apparently
// when angle-bracketed text was stripped as HTML. TODO: recover the full original source;
// does not compile as-is.
import java.util.ArrayList;import java.util.List;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Put;import org.apache.hadoop.hbase.util.Bytes;public class ErrorPutExample { /** * @param args */ public static void main(String[] args)throws Exception { Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,Bytes.toBytes("testtable")); List
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.Get;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.BinaryComparator;import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;import org.apache.hadoop.hbase.filter.FamilyFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class FamilyFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); FamilyFilter familyFilter=new FamilyFilter(CompareOp.LESS, new BinaryComparator(Bytes.toBytes("family2"))); scan.setFilter(familyFilter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ System.out.println(result); } resultScanner.close(); System.out.println("-----------"); Get get=new Get(Bytes.toBytes("row7")); get.setFilter(familyFilter); Result result=table.get(get); System.out.println(result); }}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.KeyValue;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class FirstKeyOnlyFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); FirstKeyOnlyFilter filter=new FirstKeyOnlyFilter(); scan.setFilter(filter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ for(KeyValue kv:result.raw()){ System.out.println(kv+"-----"+Bytes.toString(kv.getValue())); } } resultScanner.close(); }}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.Get;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Put;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.util.Bytes;public class FlushExample { /** * @param args */ public static void main(String[] args)throws Exception { Configuration conf=HBaseConfiguration.create(); HTable htable=new HTable(conf,"testtable"); System.out.println("auto flush:"+htable.isAutoFlush()); htable.setAutoFlush(false); Put put1=new Put(Bytes.toBytes("row2")); put1.add(Bytes.toBytes("family1"), Bytes.toBytes("column3"), Bytes.toBytes("value3")); htable.put(put1); Put put2=new Put(Bytes.toBytes("row3")); put2.add(Bytes.toBytes("family1"), Bytes.toBytes("column4"), Bytes.toBytes("value4")); htable.put(put2); Put put3=new Put(Bytes.toBytes("row4")); put3.add(Bytes.toBytes("family1"), Bytes.toBytes("column5"), Bytes.toBytes("value5")); htable.put(put3); Get get=new Get(Bytes.toBytes("row3")); Result result=htable.get(get); System.out.println(result); htable.flushCommits(); result=htable.get(get); System.out.println(result); }}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Demonstrates a basic Get: reads family1:column1 of "row1" and prints the
 * value as a string.
 */
public class GetExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");
        try {
            Get get = new Get(Bytes.toBytes("row1"));
            get.addColumn(Bytes.toBytes("family1"), Bytes.toBytes("column1"));
            Result result = table.get(get);
            // NOTE(review): getValue returns null if the cell is absent;
            // Bytes.toString(null) would then print "null"-ish or throw — confirm.
            byte[] value = result.getValue(Bytes.toBytes("family1"), Bytes.toBytes("column1"));
            System.out.println(Bytes.toString(value));
        } finally {
            // fix: the original leaked the HTable handle
            table.close();
        }
    }
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Demonstrates getRowOrBefore: fetches the given row, or the closest row
 * before it, within one column family. "row99" shows the before-match case
 * and "abc" shows a key that may have nothing before it.
 */
public class GetRowOrBeforeExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");
        try {
            Result result = table.getRowOrBefore(Bytes.toBytes("row1"), Bytes.toBytes("family1"));
            // NOTE(review): assumes "row1" (or something before it) exists;
            // result would be null otherwise and getRow() would NPE — confirm.
            System.out.println(Bytes.toString(result.getRow()));
            Result result2 = table.getRowOrBefore(Bytes.toBytes("row99"), Bytes.toBytes("family1"));
            System.out.println(result2);
            Result result3 = table.getRowOrBefore(Bytes.toBytes("abc"), Bytes.toBytes("family1"));
            System.out.println(result3);
        } finally {
            // fix: the original leaked the HTable handle
            table.close();
        }
    }
}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.KeyValue;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.InclusiveStopFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class InclusiveStopFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); scan.setStartRow(Bytes.toBytes("row7")); InclusiveStopFilter filter=new InclusiveStopFilter(Bytes.toBytes("row9")); scan.setFilter(filter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ for(KeyValue kv:result.raw()){ System.out.println(kv+"-----"+Bytes.toString(kv.getValue())); } } resultScanner.close(); }}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.KeyValue;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.KeyOnlyFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class KeyOnlyFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); KeyOnlyFilter filter=new KeyOnlyFilter(true); scan.setFilter(filter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ for(KeyValue kv:result.raw()){ System.out.println(kv+"-----"+Bytes.toString(kv.getValue())); } } resultScanner.close(); }}
// NOTE(review): this example is truncated — it stops mid-statement at "List". The generic
// parameter (presumably "<Get>") and the rest of main() (likely building a list of Gets
// and calling table.get(list)) were lost, apparently when angle-bracketed text was
// stripped as HTML. TODO: recover the full original source; does not compile as-is.
import java.util.ArrayList;import java.util.List;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.KeyValue;import org.apache.hadoop.hbase.client.Get;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.util.Bytes;public class ListGetExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); List
// NOTE(review): this example is truncated — it stops mid-statement at "List". The generic
// parameter (presumably "<Put>") and the rest of main() (likely building a list of Puts
// and calling table.put(list)) were lost, apparently when angle-bracketed text was
// stripped as HTML. TODO: recover the full original source; does not compile as-is.
import java.util.ArrayList;import java.util.List;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Put;import org.apache.hadoop.hbase.util.Bytes;public class ListPutExample { /** * @param args */ public static void main(String[] args)throws Exception { Configuration conf=HBaseConfiguration.create(); //HBaseAdmin admin=new HBaseAdmin(conf); HTable table=new HTable(conf, "testtable"); List
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.PageFilter;//Administratorpublic class PageFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); PageFilter filter=new PageFilter(1); scan.setFilter(filter); ResultScanner scanner=table.getScanner(scan); for(Result result:scanner){ System.out.println(result); } scanner.close(); }}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.PrefixFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class PrefixFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); PrefixFilter filter=new PrefixFilter(Bytes.toBytes("row6")); scan.setFilter(filter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ System.out.println(result); } resultScanner.close(); }}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Demonstrates a basic Put: writes family1:column1 with the implicit
 * (server-assigned) timestamp and family1:column2 with an explicit
 * timestamp of 1000 into "row1".
 */
public class PutExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable htable = new HTable(conf, "testtable");
        try {
            Put put = new Put(Bytes.toBytes("row1"));
            put.add(Bytes.toBytes("family1"), Bytes.toBytes("column1"), Bytes.toBytes("value1"));
            // Explicit timestamp 1000 for this cell version.
            put.add(Bytes.toBytes("family1"), Bytes.toBytes("column2"), 1000,
                    Bytes.toBytes("value2"));
            htable.put(put);
        } finally {
            // fix: the original leaked the HTable handle (close also flushes the put)
            htable.close();
        }
    }
}
// fix: dropped the unused (accidental auto-import) org.codehaus.jackson.sym.BytesToNameCanonicalizer import
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.Get;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.BinaryComparator;import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;import org.apache.hadoop.hbase.filter.QualifierFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class QualifierFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); QualifierFilter filter=new QualifierFilter(CompareOp.LESS, new BinaryComparator(Bytes.toBytes("column9"))); scan.setFilter(filter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ System.out.println(result); } resultScanner.close(); System.out.println("------------"); Get get=new Get(Bytes.toBytes("row7")); get.setFilter(filter); Result result=table.get(get); System.out.println(result); }}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.RandomRowFilter;//Administratorpublic class RandomRowFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); RandomRowFilter filter=new RandomRowFilter(0.8f); scan.setFilter(filter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ System.out.println(result); } resultScanner.close(); }}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.BinaryComparator;import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;import org.apache.hadoop.hbase.filter.RegexStringComparator;import org.apache.hadoop.hbase.filter.RowFilter;import org.apache.hadoop.hbase.filter.SubstringComparator;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class RowFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable hbasetalbe=new HTable(conf, "testtable"); Scan scan1=new Scan(); RowFilter rowFileter=new RowFilter(CompareOp.LESS, new BinaryComparator(Bytes.toBytes("row9"))); scan1.setFilter(rowFileter); ResultScanner resultScanner=hbasetalbe.getScanner(scan1); for(Result result:resultScanner){ System.out.println(result); } resultScanner.close(); System.out.println("-----------------"); Scan scan2=new Scan(); RowFilter rowFilter2=new RowFilter(CompareOp.EQUAL, new RegexStringComparator(".*")); scan2.setFilter(rowFilter2); resultScanner=hbasetalbe.getScanner(scan2); for(Result result:resultScanner){ System.out.println(result); } resultScanner.close(); System.out.println("-----------------"); Scan scan3=new Scan(); RowFilter rowFilter3=new RowFilter(CompareOp.EQUAL, new SubstringComparator("9")); scan3.setFilter(rowFilter3); resultScanner=hbasetalbe.getScanner(scan3); for(Result result:resultScanner){ System.out.println(result); } resultScanner.close(); }}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;

/**
 * Demonstrates how scanner caching (rows per RPC) and batching (cells per
 * Result) affect the number of Results returned; prints the Result count
 * for two cache/batch combinations.
 */
public class ScanCacheAndBatchExample {

    /**
     * Scans "testtable" with the given caching/batch settings and prints
     * how many Result objects the scan produced.
     *
     * @param cache rows fetched per RPC (Scan.setCaching)
     * @param batch max cells per Result (Scan.setBatch)
     */
    public static void scan(int cache, int batch) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");
        try {
            Scan scan = new Scan();
            scan.setCaching(cache);
            scan.setBatch(batch);
            ResultScanner resultScan = table.getScanner(scan);
            try {
                int i = 0;
                for (Result result : resultScan) {
                    i++;
                }
                System.out.println(i);
            } finally {
                // fix: close the scanner even if iteration throws
                resultScan.close();
            }
        } finally {
            // fix: the original leaked the HTable handle
            table.close();
        }
    }

    public static void main(String[] args) throws Exception {
        scan(2, 1);
        scan(1, 2);
    }
}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.util.Bytes;public class ScanExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan1=new Scan(); scan1.setStartRow(Bytes.toBytes("row9")); ResultScanner resultScanner=table.getScanner(scan1); for(Result result:resultScanner){ System.out.println(result); } Scan scan2=new Scan(); scan2.addColumn(Bytes.toBytes("family1"), Bytes.toBytes("column6")); resultScanner=table.getScanner(scan2); for(Result result:resultScanner){ System.out.println(result); } }}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.KeyValue;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;import org.apache.hadoop.hbase.filter.SubstringComparator;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class SingleColumnValueExcludeFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); SingleColumnValueExcludeFilter filter=new SingleColumnValueExcludeFilter(Bytes.toBytes("family1"), Bytes.toBytes("column6"), CompareOp.EQUAL,new SubstringComparator("value")); filter.setFilterIfMissing(true); scan.setFilter(filter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ for(KeyValue kv:result.raw()){ System.out.println(kv+"----"+Bytes.toString(kv.getValue())); } } resultScanner.close(); }}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.KeyValue;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;import org.apache.hadoop.hbase.filter.SubstringComparator;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class SingleColumnValueFilterExample { /** * @param args */ public static void main(String[] args)throws Exception { Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); SingleColumnValueFilter filter=new SingleColumnValueFilter(Bytes.toBytes("family1"), Bytes.toBytes("column6"), CompareOp.EQUAL,new SubstringComparator("x")); filter.setFilterIfMissing(true); scan.setFilter(filter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ for(KeyValue kv:result.raw()){ System.out.println(kv+"----"+Bytes.toString(kv.getValue())); } } resultScanner.close(); }}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.BinaryComparator;import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;import org.apache.hadoop.hbase.filter.SkipFilter;import org.apache.hadoop.hbase.filter.ValueFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class SkipFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); ValueFilter valueFilter=new ValueFilter(CompareOp.NOT_EQUAL, new BinaryComparator(Bytes.toBytes("value8"))); Scan scan=new Scan(); scan.setFilter(valueFilter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ System.out.println(result); } resultScanner.close(); System.out.println("-------------"); SkipFilter skipFilter=new SkipFilter(valueFilter); scan.setFilter(skipFilter); resultScanner=table.getScanner(scan); for(Result result:resultScanner){ System.out.println(result); } resultScanner.close(); }}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Demonstrates table administration: createTable2 drops "testtable" if it
 * exists and recreates it with one column family ("family1").
 */
public class TableExample {

    // NOTE(review): creates a table descriptor with NO column family; many HBase
    // versions reject this at createTable time — confirm before relying on it.
    // Kept for reference; main() does not call it.
    private static void createTable1() throws Exception {
        Configuration con = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(con);
        try {
            HTableDescriptor desc = new HTableDescriptor("testtable");
            admin.createTable(desc);
        } finally {
            // fix: close in finally so the admin connection is released on errors
            admin.close();
        }
    }

    // Drops the table if present (disable first — HBase requires it), then
    // recreates it with a single column family.
    private static void createTable2() throws Exception {
        Configuration con = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(con);
        try {
            if (admin.tableExists("testtable")) {
                admin.disableTable("testtable");
                admin.deleteTable("testtable");
            }
            HTableDescriptor desc = new HTableDescriptor("testtable");
            HColumnDescriptor colDesc = new HColumnDescriptor(Bytes.toBytes("family1"));
            desc.addFamily(colDesc);
            admin.createTable(desc);
        } finally {
            // fix: close in finally so the admin connection is released on errors
            admin.close();
        }
    }

    public static void main(String[] args) throws Exception {
        createTable2();
    }
}
// NOTE(review): this example is truncated — it stops mid-statement at "List". The generic
// parameter (presumably "<Long>", for a TimestampsFilter timestamp list) and the rest of
// main() were lost, apparently when angle-bracketed text was stripped as HTML.
// TODO: recover the full original source; does not compile as-is.
import java.util.ArrayList;import java.util.List;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.KeyValue;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.TimestampsFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class TimeStampsFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); List
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.KeyValue;import org.apache.hadoop.hbase.client.Get;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;import org.apache.hadoop.hbase.filter.RegexStringComparator;import org.apache.hadoop.hbase.filter.ValueFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class ValueFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); Scan scan=new Scan(); ValueFilter filter=new ValueFilter(CompareOp.EQUAL, new RegexStringComparator("xxx")); scan.setFilter(filter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ for(KeyValue keyValue:result.raw()){ System.out.println(Bytes.toString(keyValue.getValue())); } } resultScanner.close(); System.out.println("-------------"); Get get=new Get(Bytes.toBytes("row9")); get.setFilter(filter); Result result=table.get(get); for(KeyValue kv:result.raw()){ System.out.println(Bytes.toString(kv.getValue())); } }}
import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.BinaryComparator;import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;import org.apache.hadoop.hbase.filter.RowFilter;import org.apache.hadoop.hbase.filter.WhileMatchFilter;import org.apache.hadoop.hbase.util.Bytes;//Administratorpublic class WhileMatchFilterExample { /** * @param args */ public static void main(String[] args) throws Exception{ Configuration conf=HBaseConfiguration.create(); HTable table=new HTable(conf,"testtable"); RowFilter rowFilter=new RowFilter(CompareOp.EQUAL,new BinaryComparator(Bytes.toBytes("row7")) ); Scan scan=new Scan(); scan.setFilter(rowFilter); ResultScanner resultScanner=table.getScanner(scan); for(Result result:resultScanner){ System.out.println(result); } resultScanner.close(); System.out.println("----------------------"); WhileMatchFilter matchFilter=new WhileMatchFilter(rowFilter); scan.setFilter(matchFilter); resultScanner=table.getScanner(scan); for(Result result:resultScanner){ System.out.println(result); } resultScanner.close(); }}
版权声明:本文内容由网络用户投稿,版权归原作者所有,本站不拥有其著作权,亦不承担相应法律责任。如果您发现本站中有涉嫌抄袭或描述失实的内容,请联系我们jiasou666@gmail.com 处理,核实后本网站将在24小时内删除侵权内容。
发表评论
暂时没有评论,来抢沙发吧~