CXF Data Compression

I. HTTP Data Compression

  In the HTTP protocol, when the Content-Encoding header is gzip, deflate, x-gzip, or x-deflate, the body has been compressed before being transmitted. When the payload is relatively large, compressing it this way reduces the amount of data on the wire and speeds up the transfer. The same applies to a web service: when an operation returns a large response, the response can be compressed as well.
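
  To make the mechanism concrete, here is a standalone sketch using only the JDK's java.util.zip classes (it is not part of the CXF example; the class name GzipRoundTrip is made up for illustration). It compresses a large, repetitive payload the way a sender would before writing a gzip-encoded body, then decompresses it the way a receiver would:

package com.cxf.compress;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

public class GzipRoundTrip {
    public static void main(String[] args) throws Exception {
        // a large, repetitive payload compresses very well
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < 10000; i++) {
            sb.append("<person><name>XXXXXXXXX</name></person>");
        }
        byte[] plain = sb.toString().getBytes(StandardCharsets.UTF_8);

        // compress, as the sender would before writing a gzip-encoded body
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        GZIPOutputStream gzipOut = new GZIPOutputStream(buf);
        gzipOut.write(plain);
        gzipOut.close();
        byte[] compressed = buf.toByteArray();

        // decompress, as the receiver would before parsing the body
        GZIPInputStream gzipIn = new GZIPInputStream(new ByteArrayInputStream(compressed));
        ByteArrayOutputStream restored = new ByteArrayOutputStream();
        byte[] chunk = new byte[4096];
        int n;
        while ((n = gzipIn.read(chunk)) != -1) {
            restored.write(chunk, 0, n);
        }

        System.out.println("plain: " + plain.length + " bytes, compressed: " + compressed.length + " bytes");
        System.out.println("round trip ok: " + (restored.size() == plain.length));
    }
}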

II. Examples

  1. A web service using the SOAP protocol

  The entity class Person:

package com.cxf.compress.ws;

public class Person {
    private String name;

    private String homeAddress;

    private String companyAddress;

    public Person() {
        StringBuilder sb = new StringBuilder();

        for (int i = 1; i < 10; ++i) {
            sb.append("X");
        }

        this.name = sb.toString();
        this.homeAddress = sb.toString();
        this.companyAddress = sb.toString();
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getHomeAddress() {
        return homeAddress;
    }

    public void setHomeAddress(String homeAddress) {
        this.homeAddress = homeAddress;
    }

    public String getCompanyAddress() {
        return companyAddress;
    }

    public void setCompanyAddress(String companyAddress) {
        this.companyAddress = companyAddress;
    }
}

  The service interface MyService:

package com.cxf.compress.ws;
import java.util.List;

import javax.jws.WebMethod;
import javax.jws.WebResult;
import javax.jws.WebService;

@WebService
public interface MyService{
    @WebMethod
    @WebResult(name = "personResult")
    public List<Person> getPerson();
}

  The implementation class MyServiceImpl:

package com.cxf.compress.ws;
import java.util.ArrayList;
import java.util.List;

public class MyServiceImpl implements MyService {
    @Override
    public List<Person> getPerson() {
        List<Person> list = new ArrayList<Person>();

        list.add(new Person());
        list.add(new Person());
        list.add(new Person());
        list.add(new Person());

        return list;
    }
}

  The server bootstrap class MyServer:

package com.cxf.compress.ws;

import org.apache.cxf.interceptor.LoggingInInterceptor;
import org.apache.cxf.interceptor.LoggingOutInterceptor;
import org.apache.cxf.jaxws.JaxWsServerFactoryBean;
import org.apache.cxf.transport.common.gzip.GZIPInInterceptor;
import org.apache.cxf.transport.common.gzip.GZIPOutInterceptor;
/**
 * Publishes the SOAP service with GZIP compression enabled.
 */
public class MyServer {
    public static void main(String[] args) {
        JaxWsServerFactoryBean factoryBean = new JaxWsServerFactoryBean();

        factoryBean.getInInterceptors().add(new GZIPInInterceptor()); // decompress incoming requests that are gzip-encoded
        factoryBean.getInInterceptors().add(new LoggingInInterceptor());

        GZIPOutInterceptor out = new GZIPOutInterceptor();
        out.setThreshold(0); // compression threshold: by default only payloads larger than 1 KB are gzipped; 0 means compress every non-empty response

        factoryBean.getOutInterceptors().add(out); // compress outgoing responses with gzip
        factoryBean.getOutInterceptors().add(new LoggingOutInterceptor());

        String address = "http://localhost:8080/cxf/myservice";
        factoryBean.setAddress(address);
        factoryBean.setServiceClass(MyServiceImpl.class);

        factoryBean.create();
    }
}
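
  As an aside, newer CXF releases also ship an org.apache.cxf.annotations.GZIP annotation that the JAX-WS and JAX-RS factory beans can pick up from the service class, which saves wiring the interceptors by hand. The following is only a sketch under that assumption; the class name MyServiceImplWithGzip is made up for illustration, and the threshold attribute is assumed to behave like GZIPOutInterceptor.setThreshold():

package com.cxf.compress.ws;

import org.apache.cxf.annotations.GZIP;

// Equivalent in intent to registering GZIPInInterceptor/GZIPOutInterceptor manually.
@GZIP(threshold = 0) // assumption: 0 compresses every non-empty response, like setThreshold(0)
public class MyServiceImplWithGzip extends MyServiceImpl {
    // no extra code; the annotation is read when the endpoint is published
}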

  The client class MyClient:

package com.cxf.compress.ws;

import java.util.List;

import org.apache.cxf.endpoint.Client;
import org.apache.cxf.endpoint.Endpoint;
import org.apache.cxf.frontend.ClientProxy;
import org.apache.cxf.jaxws.JaxWsProxyFactoryBean;
import org.apache.cxf.transport.common.gzip.GZIPInInterceptor;
import org.apache.cxf.transport.common.gzip.GZIPOutInterceptor;

public class MyClient {
    public static void main(String[] args) {
        JaxWsProxyFactoryBean factoryBean = new JaxWsProxyFactoryBean();

        factoryBean.setAddress("http://localhost:8080/cxf/myservice");
        factoryBean.setServiceClass(MyService.class);

        Object object = factoryBean.create();

        Client client = ClientProxy.getClient(object);

        Endpoint endpoint = client.getEndpoint();

        endpoint.getInInterceptors().add(new GZIPInInterceptor()); // decompress responses that are gzip-encoded

        GZIPOutInterceptor out = new GZIPOutInterceptor();
        out.setThreshold(0); // compression threshold: by default only payloads larger than 1 KB are gzipped; 0 means compress every non-empty request

        endpoint.getOutInterceptors().add(out); // compress outgoing requests with gzip

        MyService service = (MyService) object;

        List<Person> list = service.getPerson();

        System.out.println("name: " + list.get(0).getName());
    }
}

  2. A RESTful web service

  The entity class Person:

package com.cxf.compress.rs;

import java.util.Date;

import javax.xml.bind.annotation.XmlRootElement;

@XmlRootElement(name = "person")
public class Person {

    private int id;
    private String name;
    private Date date;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Date getDate() {
        return date;
    }

    public void setDate(Date date) {
        this.date = date;
    }

}

  The service interface MyService:

package com.cxf.compress.rs;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;

@Path("person") 
@Produces("*/*")
public interface MyService {

    @GET
    @Path("/")
    public java.util.List<Person> getAll();

}

  The implementation class MyServiceImpl:

package com.cxf.compress.rs;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

public class MyServiceImpl implements MyService {
 
    @Override
    public List<Person> getAll() {
        List<Person> persons = new ArrayList<Person>();
        Person person = new Person();
        person.setId(111);
        person.setName("zhangsan");
        person.setDate(new Date());

        Person person2 = new Person();
        person2.setId(222);
        person2.setName("lisi");
        person2.setDate(new Date());
        persons.add(person);
        persons.add(person2);
        return persons;
    }
 
}

  The server class Server:

package com.cxf.compress.rs;

import org.apache.cxf.interceptor.LoggingInInterceptor;
import org.apache.cxf.interceptor.LoggingOutInterceptor;
import org.apache.cxf.jaxrs.JAXRSServerFactoryBean;
import org.apache.cxf.transport.common.gzip.GZIPInInterceptor;
import org.apache.cxf.transport.common.gzip.GZIPOutInterceptor;

public class Server {
    public static void main(String[] args) {

        JAXRSServerFactoryBean factoryBean = new JAXRSServerFactoryBean();
        factoryBean.setAddress("http://localhost:8080/myservice");
        factoryBean.setResourceClasses(MyServiceImpl.class);
        
        factoryBean.getInInterceptors().add(new GZIPInInterceptor()); // decompress incoming requests that are gzip-encoded
        factoryBean.getInInterceptors().add(new LoggingInInterceptor());

        GZIPOutInterceptor out = new GZIPOutInterceptor();
        out.setThreshold(0); // compression threshold: by default only payloads larger than 1 KB are gzipped; 0 means compress every non-empty response

        factoryBean.getOutInterceptors().add(out); // compress outgoing responses with gzip
        factoryBean.getOutInterceptors().add(new LoggingOutInterceptor());
        
        factoryBean.create();
    }
}

  The client class Client:

package com.cxf.compress.rs;

import java.io.IOException;

import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

public class Client {
    public static void main(String[] args) throws Exception {
        String getResult = get("http://localhost:8080/myservice/person");
        System.out.println(getResult);

    }

    private static String get(String url) throws IOException {
        HttpGet get = new HttpGet(url);
        get.setHeader("Accept", "application/json");
        get.addHeader("Accept-Encoding", "gzip"); // ask the server to compress the response with gzip
        CloseableHttpClient client = HttpClients.createDefault();
        String responseContent = null;
        CloseableHttpResponse response = null;
        try {
            response = client.execute(get);
            HttpEntity entity = response.getEntity(); // response body
            if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { // status code
                responseContent = EntityUtils.toString(entity, "UTF-8");
            }
        } catch (ClientProtocolException e) {
            e.printStackTrace();
        } finally {
            if (response != null) {
                response.close();
            }
            client.close();
        }

        return responseContent;
    }

}

  For responses compressed with gzip, watching the traffic with a monitoring proxy such as TCPMon shows that far less data is transferred.
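
  To confirm the compression programmatically instead of through a proxy, one option is to turn off HttpClient's automatic content decompression and inspect the raw response yourself. The sketch below does this against the RESTful endpoint above (the class name CompressionCheck is made up for illustration, and it assumes the response really is gzip-encoded, which holds here because the server's threshold is 0):

package com.cxf.compress.rs;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.zip.GZIPInputStream;

import org.apache.http.Header;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class CompressionCheck {
    public static void main(String[] args) throws Exception {
        // disable automatic decompression so the raw headers and gzip bytes stay visible
        CloseableHttpClient client = HttpClients.custom().disableContentCompression().build();
        HttpGet get = new HttpGet("http://localhost:8080/myservice/person");
        get.addHeader("Accept-Encoding", "gzip");

        CloseableHttpResponse response = client.execute(get);
        try {
            Header encoding = response.getFirstHeader("Content-Encoding");
            System.out.println("Content-Encoding: " + (encoding == null ? "none" : encoding.getValue()));

            // decompress by hand, since automatic handling was turned off;
            // this assumes the server actually gzipped the body
            BufferedReader reader = new BufferedReader(new InputStreamReader(
                    new GZIPInputStream(response.getEntity().getContent()), "UTF-8"));
            StringBuilder body = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null) {
                body.append(line);
            }
            System.out.println(body);
        } finally {
            response.close();
            client.close();
        }
    }
}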
