Java class net.sf.ehcache.constructs.web.AlreadyGzippedException usage examples
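The snippets below collect real-world usages of net.sf.ehcache.constructs.web.AlreadyGzippedException from open-source projects. Ehcache's web filters throw this exception when an attempt is made to gzip a response body that has already been gzipped.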

Project: lutece-core    File: HeadersPageCachingFilter.java
/**
 * {@inheritDoc }
 */
@Override
protected void doFilter( HttpServletRequest request, HttpServletResponse response, FilterChain chain ) throws AlreadyGzippedException,
        AlreadyCommittedException, FilterNonReentrantException, LockTimeoutException, Exception
{
    if ( !_bInit )
    {
        init( );
    }

    if ( _bEnable )
    {
        super.doFilter( request, response, chain );
        _logger.debug( "URI served from cache : " + request.getRequestURI( ) );
    }
    else
    {
        chain.doFilter( request, response );
    }
}
Project: sharks    File: RestCacheFilter.java
@Override
protected void doFilter(HttpServletRequest request, HttpServletResponse response, FilterChain chain) throws AlreadyGzippedException,
        AlreadyCommittedException, FilterNonReentrantException, LockTimeoutException, Exception {

    String url = request.getRequestURL().toString();
    if (matchExcludePatterns(url)) {
        chain.doFilter(request, response);
        return;
    }

    super.doFilter(request, response, chain);
}
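The matchExcludePatterns helper is defined elsewhere in the sharks project and is not shown here. A minimal sketch of what such a check might look like, assuming the exclude patterns are held as compiled java.util.regex.Pattern instances (the excludePatterns field and its setup are hypothetical):

// Hypothetical sketch; the real RestCacheFilter defines its own version.
// Assumes a field: private List<java.util.regex.Pattern> excludePatterns,
// populated from configuration when the filter is initialized.
private boolean matchExcludePatterns(String url) {
    for (java.util.regex.Pattern pattern : excludePatterns) {
        if (pattern.matcher(url).matches()) {
            return true; // URL is excluded from caching
        }
    }
    return false;
}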
Project: xslweb    File: CachingFilter.java
@Override
protected void doFilter(HttpServletRequest request, HttpServletResponse response, FilterChain chain) throws AlreadyGzippedException, AlreadyCommittedException, FilterNonReentrantException, LockTimeoutException, Exception {
  WebApp webApp = (WebApp) request.getAttribute(Definitions.ATTRNAME_WEBAPP);
  PipelineHandler pipelineHandler = (PipelineHandler) request.getAttribute(Definitions.ATTRNAME_PIPELINEHANDLER);
  if (!webApp.getDevelopmentMode() && pipelineHandler.getCache()) {
    super.doFilter(request, response, chain);                  
  } else {
    chain.doFilter(request, response);
  }
}
Project: xslweb    File: SimpleCachingHeadersPageCachingFilter.java
/**
 * Builds the PageInfo object by passing the request along the filter chain
 * <p>
 * The following headers are set:
 * <ul>
 * <li>Last-Modified
 * <li>Expires
 * <li>Cache-Control
 * <li>ETag
 * </ul>
 * Any of these headers already set in the response are ignored, and new ones
 * generated. To control your own caching headers, use
 * {@link SimplePageCachingFilter}.
 *
 * @param request
 * @param response
 * @param chain
 * @return a Serializable value object for the page or page fragment
 * @throws AlreadyGzippedException
 *           if an attempt is made to double gzip the body
 * @throws Exception
 * 
 */
@Override
protected PageInfo buildPage(HttpServletRequest request, HttpServletResponse response, FilterChain chain) throws AlreadyGzippedException, Exception {
  PageInfo pageInfo = super.buildPage(request, response, chain);

  PipelineHandler pipelineHandler = (PipelineHandler) request.getAttribute(Definitions.ATTRNAME_PIPELINEHANDLER);
  if (pipelineHandler.getCacheHeaders()) {
    final List<Header<? extends Serializable>> headers = pageInfo.getHeaders();

    long ttlMilliseconds = calculateTimeToLiveMilliseconds();

    // Remove any conflicting headers
    for (final Iterator<Header<? extends Serializable>> headerItr = headers.iterator(); headerItr.hasNext();) {
      final Header<? extends Serializable> header = headerItr.next();

      final String name = header.getName();
      if ("Last-Modified".equalsIgnoreCase(name) || "Expires".equalsIgnoreCase(name) || "Cache-Control".equalsIgnoreCase(name) || "ETag".equalsIgnoreCase(name)) {
        headerItr.remove();
      }
    }

    // add expires and last-modified headers

    // trim the milliseconds off the value since the header is only accurate
    // down to the second
    long lastModified = pageInfo.getCreated().getTime();
    lastModified = TimeUnit.MILLISECONDS.toSeconds(lastModified);
    lastModified = TimeUnit.SECONDS.toMillis(lastModified);

    headers.add(new Header<Long>("Last-Modified", lastModified));
    headers.add(new Header<Long>("Expires", System.currentTimeMillis() + ttlMilliseconds));
    headers.add(new Header<String>("Cache-Control", "max-age=" + ttlMilliseconds / MILLISECONDS_PER_SECOND));
    headers.add(new Header<String>("ETag", generateEtag(ttlMilliseconds)));
  }

  return pageInfo;
}
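generateEtag is a private helper of this filter. In Ehcache's own SimpleCachingHeadersPageCachingFilter the tag is derived from the computed expiry instant; a minimal sketch along those lines (not necessarily the exact xslweb implementation):

// Sketch: ETag built from the expiry time and quoted as HTTP requires.
private String generateEtag(long ttlMilliseconds) {
    long expiry = System.currentTimeMillis() + ttlMilliseconds;
    return "\"" + expiry + "\"";
}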
Project: xslweb    File: CachingFilter.java
/**
 * Builds the PageInfo object by passing the request along the filter chain
 * 
 * @param request
 * @param response
 * @param chain
 * @return a Serializable value object for the page or page fragment
 * @throws AlreadyGzippedException
 *           if an attempt is made to double gzip the body
 * @throws Exception
 */
protected PageInfo buildPage(final HttpServletRequest request, final HttpServletResponse response, final FilterChain chain) throws AlreadyGzippedException, Exception {

  // Invoke the next entity in the chain
  final ByteArrayOutputStream outstr = new ByteArrayOutputStream();
  final GenericResponseWrapper wrapper = new GenericResponseWrapper(response, outstr);
  chain.doFilter(request, wrapper);
  wrapper.flush();

  long timeToLiveSeconds = blockingCache.getCacheConfiguration().getTimeToLiveSeconds();

  // Return the page info
  return new PageInfo(wrapper.getStatus(), wrapper.getContentType(), wrapper.getCookies(), outstr.toByteArray(), true, timeToLiveSeconds, wrapper.getAllHeaders());
}
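One detail worth noting: wrapper.flush() pushes any output still buffered in the wrapper's writer or stream into outstr, so it has to run before outstr.toByteArray() is handed to the PageInfo.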
Project: mtools    File: PageEhCacheFilter.java
@Override
protected void doFilter(final HttpServletRequest request,
        final HttpServletResponse response, final FilterChain chain)
        throws AlreadyGzippedException, AlreadyCommittedException,
        FilterNonReentrantException, LockTimeoutException, Exception {
    if (cacheURLs == null) {
        init();
    }

    String url = request.getRequestURI();
    boolean flag = false;
    if (cacheURLs != null && cacheURLs.length > 0) {
        for (String cacheURL : cacheURLs) {
            if (url.contains(cacheURL.trim())) {
                flag = true;
                break;
            }
        }
    }
    // If the URL matches one of the URLs we want to cache, serve the page from the cache; otherwise continue with the normal request handling
    if (flag) {
        String query = request.getQueryString();
        query = (query != null) ? "?" + query : "";
        log.info("Request served from cache: " + url + query);
        super.doFilter(request, response, chain);
    } else {
        chain.doFilter(request, response);
    }
}
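The cacheURLs field and the lazy init() it guards are not part of the snippet. A plausible reconstruction, assuming the URL list arrives as a comma-separated filter init-param and that the FilterConfig was stored in a field at startup (both the init-param name "cacheURLs" and the filterConfig field are assumptions):

// Hypothetical sketch of the lazy init() used above.
private String[] cacheURLs;

private void init() {
    String patterns = filterConfig.getInitParameter("cacheURLs");
    if (patterns != null) {
        cacheURLs = patterns.split(",");
    }
}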
Project: Free-Choice.Net    File: MyPageCachingFilter.java
@Override
protected void doFilter(final HttpServletRequest request,
        final HttpServletResponse response, final FilterChain chain)
        throws AlreadyGzippedException, AlreadyCommittedException,
        FilterNonReentrantException, LockTimeoutException, Exception {
    System.err.println("---------cached-----------");
    super.doFilter(request, response, chain);
}
Project: xslweb    File: CachingFilter.java
/**
 * Performs the filtering for a request. This method caches responses
 * keyed by {@link #calculateKey(javax.servlet.http.HttpServletRequest)}
 * <p/>
 * By default this method queues requests for the page response for a given
 * key until the first thread in the queue has completed. The request that
 * arrives when the page expires incurs the cost of waiting for the
 * downstream processing to return the response.
 * <p/>
 * The maximum time to wait can be configured by setting
 * <code>setTimeoutMillis</code> on the underlying <code>BlockingCache</code>.
 * 
 * @param request
 * @param response
 * @param chain
 * @throws AlreadyGzippedException
 *           if a double gzip is attempted
 * @throws AlreadyCommittedException
 *           if the response was committed on the way in or the on the way
 *           back
 * @throws FilterNonReentrantException
 *           if an attempt is made to reenter this filter in the same request.
 * @throws LockTimeoutException
 *           if this request is waiting on another that is populating the
 *           cache entry and times out while waiting. Only occurs if the
 *           BlockingCache has a timeout set.
 * @throws Exception
 *           for all other exceptions. They will be caught and logged in
 *           {@link Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain)}
 */
protected void doFilter(final HttpServletRequest request, final HttpServletResponse response, final FilterChain chain) throws AlreadyGzippedException, AlreadyCommittedException, FilterNonReentrantException, LockTimeoutException, Exception {
  if (response.isCommitted()) {
    throw new AlreadyCommittedException("Response already committed before doing buildPage.");
  }
  logRequestHeaders(request);
  PageInfo pageInfo = buildPageInfo(request, response, chain);

  if (pageInfo.isOk()) {
    if (response.isCommitted()) {
      throw new AlreadyCommittedException("Response already committed after doing buildPage" + " but before writing response from PageInfo.");
    }
    writeResponse(request, response, pageInfo);
  }
}
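CachingFilter leaves calculateKey abstract, so each subclass decides what identifies a cacheable page. Ehcache's stock SimplePageCachingFilter keys on HTTP method plus URI plus query string; a subclass here could do the same:

// Typical key: one cache entry per method + URI + query string combination.
@Override
protected String calculateKey(HttpServletRequest httpRequest) {
    StringBuilder key = new StringBuilder();
    key.append(httpRequest.getMethod())
       .append(httpRequest.getRequestURI())
       .append(httpRequest.getQueryString());
    return key.toString();
}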