Cassandra Client in Java - Cassandra Notes (Part 5)

  The Cassandra client talks to the server through Apache Thrift, an RPC framework.

  First add the two jars apache-cassandra-thrift-2.1.11.jar and libthrift-0.9.2.jar from the $CASSANDRA_HOME/lib directory to the project, then add the JUnit library for the unit tests.

import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
 
import org.apache.cassandra.thrift.InvalidRequestException;  
import org.apache.cassandra.thrift.TimedOutException;  
import org.apache.cassandra.thrift.UnavailableException;  
import org.apache.thrift.TException;  
    
import org.apache.cassandra.thrift.ColumnOrSuperColumn;
import org.apache.cassandra.thrift.ColumnPath;
import org.apache.cassandra.thrift.ConsistencyLevel; 
import org.apache.cassandra.thrift.Cassandra;
import org.apache.cassandra.thrift.Column;
import org.apache.cassandra.thrift.ColumnParent;
import org.apache.cassandra.thrift.Mutation;
import org.apache.cassandra.thrift.NotFoundException;
import org.apache.cassandra.thrift.SlicePredicate;
import org.apache.cassandra.thrift.SliceRange;
import org.apache.cassandra.thrift.SuperColumn;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.junit.Test;

public class CassandraClient {
    /*
     * Basic operations against a Cassandra database
     * */
    
    private static String host = "127.0.0.1";
    private static int port = 9160; 
    private static String keyspace = "cocoon"; 
    // Expose the client so callers can batch-insert data themselves
    public static Cassandra.Client client = null;
    private static ThreadLocal<TTransport> ttrans = new ThreadLocal<TTransport>(); 
    
    // Open a database connection (one transport per thread)
    public static TTransport openConnection(){
        TTransport tTransport = ttrans.get();
        if( tTransport == null ){
            tTransport = new TFramedTransport(new TSocket(host, port));
            TProtocol tProtocol = new TBinaryProtocol(tTransport);
            client = new Cassandra.Client(tProtocol);    
            try {
                tTransport.open();
                client.set_keyspace(keyspace);
                ttrans.set(tTransport);
                System.out.println(tTransport);
            } catch (TTransportException e) {
                e.printStackTrace();
            } catch (InvalidRequestException e) {
                e.printStackTrace();
            } catch (TException e) {
                e.printStackTrace();
            }
        }
        return tTransport;
    }
    
    // Close the database connection
    public static void closeConnection(){
        TTransport tTransport = ttrans.get();
        ttrans.set(null);
        if( tTransport != null && tTransport.isOpen() )
            tTransport.close();
    }
    
    
    // Test the thread-local transport
    @Test
    public void testThreadLocal() throws Exception{
        TTransport t1 = CassandraClient.openConnection();
        System.out.println(t1);
        TTransport t2 = CassandraClient.openConnection();
        System.out.println(t2);

        new Thread(){
            public void run(){
                TTransport t3 = CassandraClient.openConnection();
                System.out.println(t3);
                CassandraClient.closeConnection();
            }
        }.start();
        
        Thread.sleep(100);
        CassandraClient.closeConnection();
        System.out.println(t1.isOpen());
    }
    
    /* Insert one super column (for a super column family)
     * @param columns map of column names to column values
     * */
    public void insertSuperColumn(String superColumnFamily, String key, String superName, Map<String, String> columns) 
            throws UnsupportedEncodingException, InvalidRequestException,
            UnavailableException, TimedOutException, TException{
        openConnection();
        Map<ByteBuffer, Map<String, List<Mutation>>> map;
        map = new HashMap<ByteBuffer, Map<String, List<Mutation>>>();
        List<Mutation> list = new ArrayList<Mutation>();
        SuperColumn superColumn = new SuperColumn();
        superColumn.setName(CassandraClient.toByteBuffer(superName));
        Set<String> columnNames = columns.keySet();
        for(String columnName: columnNames) {
            Column c = new Column();
            c.setName(CassandraClient.toByteBuffer(columnName));
            c.setValue(CassandraClient.toByteBuffer(columns.get(columnName)));
            c.setTimestamp(System.currentTimeMillis());
            superColumn.addToColumns(c);
        }
        ColumnOrSuperColumn cos = new ColumnOrSuperColumn();
        cos.super_column = superColumn;
        Mutation mutation = new Mutation();
        mutation.column_or_supercolumn = cos;
        list.add(mutation);
        Map<String,List<Mutation>> supers = new HashMap<String, List<Mutation>>();
        supers.put(superColumnFamily, list);
        map.put(toByteBuffer(key), supers);
        client.batch_mutate(map, ConsistencyLevel.ONE);
        closeConnection();
    }
    
    // Insert one column (for a standard column family)
    public void insertColumn(String columnFamily, String key, String columnName, String columnValue) throws UnsupportedEncodingException{
        openConnection();
        ColumnParent parent = new ColumnParent(columnFamily);
        if( client != null ) {
            Column column = new Column( toByteBuffer(columnName) );
            column.setValue(toByteBuffer(columnValue));
            long timestamp = System.currentTimeMillis();
            column.setTimestamp(timestamp);
            try {
                client.insert(toByteBuffer(key), parent, column, ConsistencyLevel.ONE);
            } catch (InvalidRequestException e) {
                e.printStackTrace();
            } catch (UnavailableException e) {
                e.printStackTrace();
            } catch (TimedOutException e) {
                e.printStackTrace();
            } catch (TException e) {
                e.printStackTrace();
            }
            closeConnection();
        }
    }
    
    /* Fetch all columns stored under a key (for a standard column family)
     * @return map of column names to column values
     * */
    public HashMap<String, String> getColumns(String columnFamily, String key) throws InvalidRequestException, UnavailableException, TimedOutException, UnsupportedEncodingException, TException{        
        openConnection();
        ColumnParent parent = new ColumnParent(columnFamily);
        if( client != null ) {
            SlicePredicate predicate = new SlicePredicate();
            // Define the column-name slice range (start ~ finish), direction and count; column names are stored sorted, so the slice can be read forward or reversed
            SliceRange sliceRange = new SliceRange(toByteBuffer(""), toByteBuffer(""), false,100);
            predicate.setSlice_range(sliceRange); 
            List<ColumnOrSuperColumn> results = client.get_slice(toByteBuffer(key), parent, predicate, ConsistencyLevel.ONE);  
            if( results == null )  
                return null;
            HashMap<String, String> map = new HashMap<String, String>();
            for (ColumnOrSuperColumn result : results)  
            {  
                Column column = result.column;  
                map.put(byteBufferToString(column.name), byteBufferToString(column.value));
                //System.out.println(byteBufferToString(column.name) + " : " + byteBufferToString(column.value) );  
            }  
            closeConnection();
            return map;
        }
        return null;
    }
    
    /* Fetch all super columns stored under a key (for a super column family)
     * @return map of super column names to their column maps
     * */
    public HashMap<String, Map<String, String>> getSuperColumns(String superColumnFamily, String key) throws InvalidRequestException, UnavailableException, TimedOutException, UnsupportedEncodingException, TException{        
        openConnection();
        ColumnParent parent = new ColumnParent(superColumnFamily);
        if( client != null ) {
            SlicePredicate predicate = new SlicePredicate();
            // Define the super-column-name slice range (start ~ finish), direction and count; super column names are stored sorted, so the slice can be read forward or reversed
            SliceRange sliceRange = new SliceRange(toByteBuffer(""), toByteBuffer(""), false, 100);  
            predicate.setSlice_range(sliceRange); 
            List<ColumnOrSuperColumn> results = client.get_slice(toByteBuffer(key), parent, predicate, ConsistencyLevel.ONE);  
            if( results == null )  
                return null;
            HashMap<String, Map<String, String>> supers = new HashMap<String, Map<String,String>>();
            for (ColumnOrSuperColumn result : results)  
            {  
                SuperColumn superColumn = result.super_column;  
                Map<String, String> columns = new HashMap<String, String>();
                for( Column column : superColumn.columns){
                    columns.put(byteBufferToString(column.name), byteBufferToString(column.value));
                }
                supers.put(byteBufferToString(superColumn.name), columns);
            }  
            closeConnection();
            return supers;
        }
        return null;
    }
    
    /* Fetch the value of a single column by key and column name (for a standard column family)
     * @return the column value as a String
     * */
    public String getColumnValue(String columnFamily, String key, String columnName){
        try {
            ColumnPath path = new ColumnPath(columnFamily);
            path.setColumn(toByteBuffer(columnName));
            try {
                openConnection();
                String result = new String( client.get(toByteBuffer(key), path, ConsistencyLevel.ONE).getColumn().getValue(), "UTF-8" );
                closeConnection();
                return result;
            } catch (InvalidRequestException e) {
                e.printStackTrace();
            } catch (NotFoundException e) {
                e.printStackTrace();
            } catch (UnavailableException e) {
                e.printStackTrace();
            } catch (TimedOutException e) {
                e.printStackTrace();            
            } catch (TException e) {
                e.printStackTrace();
            }
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
        return null;
    }    
    
    // Convert a String to a ByteBuffer (UTF-8)
    public static ByteBuffer toByteBuffer(String value) throws UnsupportedEncodingException  {  
        return ByteBuffer.wrap(value.getBytes("UTF-8"));  
    }
    
    // Convert a ByteBuffer to a String (UTF-8)
    public static String byteBufferToString(ByteBuffer byteBuffer) throws UnsupportedEncodingException{
        byte[] bytes = new byte[byteBuffer.remaining()];
        byteBuffer.get(bytes);
        return new String(bytes,"UTF-8");
    }
}
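
For reference, a minimal usage sketch of the class above. The column family names "student" and "student_courses" are placeholders assumed here, not part of the original code; replace them with whatever exists in the cocoon keyspace.

import java.util.HashMap;
import java.util.Map;

public class CassandraClientDemo {
    public static void main(String[] args) throws Exception {
        CassandraClient dao = new CassandraClient();

        // Standard column family (name "student" is assumed)
        dao.insertColumn("student", "1001", "name", "Tom");
        System.out.println(dao.getColumnValue("student", "1001", "name"));
        System.out.println(dao.getColumns("student", "1001"));

        // Super column family (name "student_courses" is assumed)
        Map<String, String> columns = new HashMap<String, String>();
        columns.put("teacher", "Lee");
        columns.put("score", "90");
        dao.insertSuperColumn("student_courses", "1001", "math", columns);
        System.out.println(dao.getSuperColumns("student_courses", "1001"));
    }
}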

Batch inserts go through client.batch_mutate(); both standard and super column families can be written this way, as the insertSuperColumn() method above shows.
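
For a standard column family the mutation map has the same shape; the only difference is that each Mutation wraps a plain Column instead of a SuperColumn. Below is a sketch of such a method (insertColumnsBatch is a name chosen here, not part of the original class; it relies on the imports of CassandraClient and reuses openConnection(), client, toByteBuffer() and closeConnection() from above):

    // Batch-insert several columns of one row into a standard column family (sketch)
    public void insertColumnsBatch(String columnFamily, String key, Map<String, String> columns)
            throws UnsupportedEncodingException, InvalidRequestException,
            UnavailableException, TimedOutException, TException {
        openConnection();
        List<Mutation> list = new ArrayList<Mutation>();
        long timestamp = System.currentTimeMillis();
        for (Map.Entry<String, String> entry : columns.entrySet()) {
            Column c = new Column();
            c.setName(toByteBuffer(entry.getKey()));
            c.setValue(toByteBuffer(entry.getValue()));
            c.setTimestamp(timestamp);
            ColumnOrSuperColumn cos = new ColumnOrSuperColumn();
            cos.setColumn(c);                       // a plain Column, not a SuperColumn
            Mutation mutation = new Mutation();
            mutation.setColumn_or_supercolumn(cos);
            list.add(mutation);
        }
        Map<String, List<Mutation>> byColumnFamily = new HashMap<String, List<Mutation>>();
        byColumnFamily.put(columnFamily, list);
        Map<ByteBuffer, Map<String, List<Mutation>>> mutationMap =
                new HashMap<ByteBuffer, Map<String, List<Mutation>>>();
        mutationMap.put(toByteBuffer(key), byColumnFamily);
        client.batch_mutate(mutationMap, ConsistencyLevel.ONE);
        closeConnection();
    }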

Note that when inserting a large amount of data row by row, opening the connection once for the whole run is enough. If you call the methods above as written, every row opens and closes its own connection, which quickly exhausts the client's ephemeral ports. The ephemeral port range is 49152 ~ 65535 (that is, 2^15 + (2^16 - 2^15)/2 through 2^16 - 1), so code should avoid churning through ephemeral ports like this.
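
A bulk-load path should therefore open the transport once, push all rows through it, and close it once at the end, roughly along these lines (a sketch only; the column family name "student", the row count and the generated data are assumptions for illustration):

CassandraClient.openConnection();
try {
    ColumnParent parent = new ColumnParent("student");   // column family name assumed
    for (int i = 0; i < 100000; i++) {
        Column column = new Column();
        column.setName(CassandraClient.toByteBuffer("name"));
        column.setValue(CassandraClient.toByteBuffer("user-" + i));
        column.setTimestamp(System.currentTimeMillis());
        // one connection, many inserts: no ephemeral-port churn
        CassandraClient.client.insert(CassandraClient.toByteBuffer(String.valueOf(i)),
                parent, column, ConsistencyLevel.ONE);
    }
} finally {
    CassandraClient.closeConnection();
}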

Original article: https://www.cnblogs.com/maheng/p/5004625.html