Connecting to Apache Geode from Java


Yesterday I spent quite a while studying Apache Geode. Querying and manipulating data through the gfsh shell is straightforward, but figuring out how to query from a program took some experimenting, since I had little prior experience with it. Here is what finally worked.

1. Create a test region

create region --name=user --type=REPLICATE_PERSISTENT
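
This assumes a locator and a cache server are already running. If not, a minimal gfsh setup might look like the following (member names and port are illustrative):

start locator --name=locator1 --port=10334
start server --name=server50 --locators=localhost[10334]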


2. Create a Maven project in Eclipse

Modify pom.xml to pull in the Geode client libraries. (The pre-Apache equivalent was gemfire-8.2.0.jar; here we use the Apache Geode 1.2.0 artifacts from the Maven central repository instead.)

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
 
  <groupId>io.proinsight</groupId>
  <artifactId>hellogeode</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <packaging>jar</packaging>
 
  <name>hellogeode</name>
  <url>http://maven.apache.org</url>
 
  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
  </properties>
 
  <dependencies>
  <!-- https://mvnrepository.com/artifact/org.apache.geode/geode-core -->
    <dependency>
        <groupId>org.apache.geode</groupId>
        <artifactId>geode-core</artifactId>
        <version>1.2.0</version>
    </dependency>
     
    <!-- https://mvnrepository.com/artifact/org.apache.geode/geode-json -->
    <dependency>
        <groupId>org.apache.geode</groupId>
        <artifactId>geode-json</artifactId>
        <version>1.2.0</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/org.apache.geode/geode-common -->
    <dependency>
        <groupId>org.apache.geode</groupId>
        <artifactId>geode-common</artifactId>
        <version>1.2.0</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/log4j/log4j -->
    <dependency>
        <groupId>log4j</groupId>
        <artifactId>log4j</artifactId>
        <version>1.2.17</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core -->
    <dependency>
        <groupId>org.apache.logging.log4j</groupId>
        <artifactId>log4j-core</artifactId>
        <version>2.8.2</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-api -->
    <dependency>
        <groupId>org.apache.logging.log4j</groupId>
        <artifactId>log4j-api</artifactId>
        <version>2.8.2</version>
    </dependency>

    <dependency>
        <groupId>commons-codec</groupId>
        <artifactId>commons-codec</artifactId>
        <version>1.10</version>
    </dependency>
         
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>3.4</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
        </dependency>
         
            <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.13</version>
        </dependency>
  </dependencies>
<build>
  <plugins>
              <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.3</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        <plugin>  
            <groupId>org.apache.maven.plugins</groupId>  
            <artifactId>maven-jar-plugin</artifactId>  
            <version>2.6</version>  
            <configuration>  
                <archive>  
                    <manifest>  
                        <addClasspath>true</addClasspath>  
                        <classpathPrefix>lib/</classpathPrefix>  
                        <mainClass>io.proinsight.hellogeode.App</mainClass>  
                    </manifest>
                </archive>
            </configuration>  
        </plugin>  
        <plugin>  
            <groupId>org.apache.maven.plugins</groupId>  
            <artifactId>maven-dependency-plugin</artifactId>  
            <version>2.10</version>  
            <executions>  
                <execution>  
                    <id>copy-dependencies</id>  
                    <phase>package</phase>  
                    <goals>  
                        <goal>copy-dependencies</goal>  
                    </goals>  
                    <configuration>  
                        <outputDirectory>${project.build.directory}/lib</outputDirectory>  
                    </configuration>  
                </execution>  
            </executions>  
        </plugin>
    </plugins>
  </build>
</project>


Create UserBean.java:

package io.proinsight.hellogeode;
 
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
 
import java.util.Date;
 
import org.apache.geode.DataSerializable;
import org.apache.geode.DataSerializer;
 
public class UserBean implements DataSerializable{
    /**
     * A public no-arg constructor is required; without it, queries fail
     * complaining that there is no init() method.
     * */
    public UserBean() {
         
    }
    public UserBean(int id, int age, String name, Date createTime) {
        this.id = id;
        this.age = age;
        this.name = name;
        this.createTime = createTime;
    }
    /**
     * id
     * */
    private int id;
     
    /**
     * age
     * */
    private int age;
    /**
     * name
     * */
    private String name;
    /**
     * creation time
     * */
    private Date createTime;
 
    public int getId() {
        return id;
    }
 
    public void setId(int id) {
        this.id = id;
    }
 
    public int getAge() {
        return age;
    }
 
    public void setAge(int age) {
        this.age = age;
    }
 
    public String getName() {
        return name;
    }
 
    public void setName(String name) {
        this.name = name;
    }
 
    public Date getCreateTime() {
        return createTime;
    }
     
    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }
    // Read fields in exactly the order toData wrote them
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {
        this.id = in.readInt();
        this.age=in.readInt();
         this.name = in.readUTF();
         this.createTime = DataSerializer.readDate(in);
         
    }
    // Write fields; the order here defines the wire format
    public void toData(DataOutput out) throws IOException {
         out.writeInt(this.id);
         out.writeInt(this.age);
         out.writeUTF(this.name);
         DataSerializer.writeDate(this.createTime, out);
         
    }
}
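
As a quick sanity check (not from the original post), the fromData/toData pair can be exercised without a Geode cluster, since DataSerializer's static helpers work on any java.io DataInput/DataOutput. A minimal sketch, placed in the same package as UserBean (the class name UserBeanRoundTrip is illustrative):

package io.proinsight.hellogeode;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.util.Date;

public class UserBeanRoundTrip {
    public static void main(String[] args) throws Exception {
        UserBean original = new UserBean(100, 20, "username:100", new Date());

        // serialize with toData into an in-memory buffer
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        original.toData(new DataOutputStream(bos));

        // deserialize with fromData and print the copy
        UserBean copy = new UserBean();
        copy.fromData(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
        System.out.println(copy.getId() + " " + copy.getName() + " " + copy.getAge());
    }
}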


Modify App.java:

package io.proinsight.hellogeode;
 
import java.util.Date;
import java.util.Iterator;
 
import org.apache.geode.cache.Region;
import org.apache.geode.cache.client.ClientCache;
import org.apache.geode.cache.client.ClientCacheFactory;
import org.apache.geode.cache.client.ClientRegionFactory;
import org.apache.geode.cache.client.ClientRegionShortcut;
import org.apache.geode.cache.query.SelectResults;
 
import com.alibaba.fastjson.JSON;
 
/**
 * Hello world!
 *
 */
public class App 
{
    String regionName="user";
    int userNum=10;
    public static void main( String[] args )
    {
        new App().test();
    }
     
    public void test(){
        Region region = null;
        // Connect to the cluster through the locator
        ClientCache cache = new ClientCacheFactory().addPoolLocator("192.168.60.50", 10334).create();

        ClientRegionFactory rf = cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY);
        // Open the user region
        region = rf.create(regionName);

        System.out.println("Start creating users");
        create(region);
        select(region);
        System.out.println("Finished creating users");

        System.out.println("Start updating a user");
        update(region);
        select(region);
        System.out.println("Finished updating a user");

        System.out.println("Start deleting a user");
        delete(region, 108);
        select(region);
        System.out.println("Finished deleting a user");

        System.out.println("Start clearing the region");
        truncate(region);
        select(region);
        System.out.println("Finished clearing the region");

        // Close the region and release client resources
        region.close();
        cache.close();
    }
     
    /**
     * Query the user region
     * */
    public void select(Region region){
        try {
            // region.query returns the public SelectResults interface
            // (the original cast to the internal ResultsBag implementation)
            Object objList = region.query("select * from /"+regionName+" u where u.age > 15");

            if(objList instanceof SelectResults){
                Iterator iter = ((SelectResults) objList).iterator();
                while(iter.hasNext()){
                    UserBean userBean = (UserBean) iter.next();
                    System.out.println("User info: "+JSON.toJSONString(userBean));
                }
            }
            Object obj = region.get(108);
            if(obj instanceof UserBean){
                System.out.println("User 108 info: "+JSON.toJSONString(obj));
            }
        }catch (Exception e){
            e.printStackTrace();
        }
    }
    
     
    /**
     * Create 10 users
     * */
    public void create(Region region){
        for (int i = 0; i < userNum; i++) {
            int id = i+100;
            region.put(id, new UserBean(id, 10+i, "username:"+id, new Date()));
        }
    }
     
     
    /**
     * Update a user
     * */
    public void update(Region region){
        UserBean user108 = (UserBean) region.get(108);
        if(user108 != null){
            System.out.println("User 108 info: "+JSON.toJSONString(user108));
            // Change the age to 12
            user108.setAge(12);
            region.put(user108.getId(), user108);
        }
    }
     
    /**
     * Delete a single user
     * */
    public void delete(Region region,Integer id){
        region.remove(id);
    }
     
     
    /**
     * Clear the region
     * */
    public void truncate(Region region){
        region.clear();
    }
     
    
}
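
Region.query is fine for ad-hoc predicates; for parameterized OQL you can go through the cache's QueryService instead. A minimal sketch under the same cluster assumptions (the class and method names are illustrative):

package io.proinsight.hellogeode;

import org.apache.geode.cache.client.ClientCache;
import org.apache.geode.cache.query.Query;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;

public class QueryDemo {
    public static void printUsersOlderThan15(ClientCache cache) throws Exception {
        QueryService qs = cache.getQueryService();
        // $1 is bound to the first element of the parameter array
        Query q = qs.newQuery("select * from /user u where u.age > $1");
        SelectResults<?> results = (SelectResults<?>) q.execute(new Object[]{ 15 });
        for (Object row : results) {
            System.out.println(row);
        }
    }
}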



3. Package

Run maven install; the jar appears in the target directory.

Then copy hellogeode-0.0.1-SNAPSHOT.jar to a directory on the Geode server, e.g. /tmp/t/.
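
For example (the host is illustrative; use your Geode server's address):

mvn clean install
scp target/hellogeode-0.0.1-SNAPSHOT.jar root@192.168.60.50:/tmp/t/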

4. Deploy the jar to Geode

Why deploy it into Geode? Because UserBean's serialization and deserialization define the "table" structure, Geode needs UserBean.class on the server side when executing queries. Deploying hellogeode-0.0.1-SNAPSHOT.jar is mainly a way of getting UserBean.class into Geode.

gfsh>deploy  --jar=/tmp/t/hellogeode-0.0.1-SNAPSHOT.jar 
 Member  |         Deployed JAR          | Deployed JAR Location
-------- | ----------------------------- | -----------------------------------------------------------------
server50 | hellogeode-0.0.1-SNAPSHOT.jar | /opt/apache-geode-1.2.1/server50/hellogeode-0.0.1-SNAPSHOT.v1.jar
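
To confirm which jars each member currently has, gfsh also provides a list deployed command:

gfsh>list deployed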

If UserBean is modified, you need to undeploy the old jar first and then deploy the new one:

gfsh>undeploy  --jar=hellogeode-0.0.1-SNAPSHOT.jar 
 Member  |        Un-Deployed JAR        | Un-Deployed From JAR Location
-------- | ----------------------------- | -----------------------------------------------------------------
server50 | hellogeode-0.0.1-SNAPSHOT.jar | /opt/apache-geode-1.2.1/server50/hellogeode-0.0.1-SNAPSHOT.v1.jar
 
gfsh>deploy  --jar=/tmp/t/hellogeode-0.0.1-SNAPSHOT.jar 
 Member  |         Deployed JAR          | Deployed JAR Location
-------- | ----------------------------- | -----------------------------------------------------------------
server50 | hellogeode-0.0.1-SNAPSHOT.jar | /opt/apache-geode-1.2.1/server50/hellogeode-0.0.1-SNAPSHOT.v1.jar

5. Test

The test program runs directly in Eclipse; it does not need to be copied to the server.

Run App.java in Eclipse.

Test output:

Start creating users
[info 2017/09/27 13:09:13.422 CST <poolTimer-DEFAULT-2> tid=0x17] AutoConnectionSource discovered new locators [slave1/211.98.71.195:10334]

[info 2017/09/27 13:09:13.511 CST <main> tid=0x1] Updating membership port.  Port changed from 0 to 51,084.  ID is now bogon(92040:loner):0:0c29bdc1

User info: {"age":12,"createTime":1506488953578,"id":102,"name":"username:102"}
User info: {"age":15,"createTime":1506488953592,"id":105,"name":"username:105"}
User info: {"age":14,"createTime":1506488953588,"id":104,"name":"username:104"}
User info: {"age":10,"createTime":1506488953405,"id":100,"name":"username:100"}
User info: {"age":19,"createTime":1506488953611,"id":109,"name":"username:109"}
User info: {"age":13,"createTime":1506488953583,"id":103,"name":"username:103"}
User info: {"age":18,"createTime":1506488953606,"id":108,"name":"username:108"}
User info: {"age":11,"createTime":1506488953573,"id":101,"name":"username:101"}
User info: {"age":16,"createTime":1506488953597,"id":106,"name":"username:106"}
User info: {"age":17,"createTime":1506488953602,"id":107,"name":"username:107"}
User 108 info: {"age":18,"createTime":1506488953606,"id":108,"name":"username:108"}
Finished creating users
Start updating a user
User 108 info: {"age":18,"createTime":1506488953606,"id":108,"name":"username:108"}
User info: {"age":17,"createTime":1506488953602,"id":107,"name":"username:107"}
User info: {"age":12,"createTime":1506488953578,"id":102,"name":"username:102"}
User info: {"age":15,"createTime":1506488953592,"id":105,"name":"username:105"}
User info: {"age":12,"createTime":1506488953606,"id":108,"name":"username:108"}
User info: {"age":13,"createTime":1506488953583,"id":103,"name":"username:103"}
User info: {"age":19,"createTime":1506488953611,"id":109,"name":"username:109"}
User info: {"age":16,"createTime":1506488953597,"id":106,"name":"username:106"}
User info: {"age":11,"createTime":1506488953573,"id":101,"name":"username:101"}
User info: {"age":14,"createTime":1506488953588,"id":104,"name":"username:104"}
User info: {"age":10,"createTime":1506488953405,"id":100,"name":"username:100"}
User 108 info: {"age":12,"createTime":1506488953606,"id":108,"name":"username:108"}
Finished updating a user
Start deleting a user
User info: {"age":15,"createTime":1506488953592,"id":105,"name":"username:105"}
User info: {"age":12,"createTime":1506488953578,"id":102,"name":"username:102"}
User info: {"age":16,"createTime":1506488953597,"id":106,"name":"username:106"}
User info: {"age":10,"createTime":1506488953405,"id":100,"name":"username:100"}
User info: {"age":13,"createTime":1506488953583,"id":103,"name":"username:103"}
User info: {"age":19,"createTime":1506488953611,"id":109,"name":"username:109"}
User info: {"age":11,"createTime":1506488953573,"id":101,"name":"username:101"}
User info: {"age":17,"createTime":1506488953602,"id":107,"name":"username:107"}
User info: {"age":14,"createTime":1506488953588,"id":104,"name":"username:104"}
Finished deleting a user
Start clearing the region
Finished clearing the region

This article is reposted from yifangyou's 51CTO blog. Original link: http://blog.51cto.com/yifangyou/1969114. Please contact the original author for reprint permission.

