@Override @SuppressWarnings("unchecked") public List<PostPO> searchByTag(Paging paigng, String tag) { FullTextSession fullTextSession = Search.getFullTextSession(super.session()); SearchFactory sf = fullTextSession.getSearchFactory(); QueryBuilder qb = sf.buildQueryBuilder().forEntity(PostPO.class).get(); org.apache.lucene.search.Query luceneQuery = qb.phrase().onField("tags").sentence(tag).createQuery(); FullTextQuery query = fullTextSession.createFullTextQuery(luceneQuery); query.setFirstResult(paigng.getFirstResult()); query.setMaxResults(paigng.getMaxResults()); Sort sort = new Sort(new SortField("id", SortField.Type.LONG, true)); query.setSort(sort); paigng.setTotalCount(query.getResultSize()); return query.list(); }
@GET @Path("/{name}") @Produces(MediaType.TEXT_HTML) public Response sayHtmlHello(@PathParam("name") String name) { String output = "<html> " + "<title>" + "Hello " + name + "</title>" + "<body><h1>" + "Hello " + name + "<br>Database Index will now be rebuilt..." + "<br>Please make sure the system is put into maintenance or else or the query will return nothing." + "</body></h1>" + "</html> "; Session session = DBUtil.getFactory().openSession(); FullTextSession fullTextSession = Search.getFullTextSession(session); try { System.out.println("@@ database re-indexing now begin..."); fullTextSession.createIndexer().startAndWait(); } catch (Exception exc) { exc.printStackTrace(); } return Response.status(200).entity(output).build(); }
@RequestMapping(method = RequestMethod.GET, value = FONDS + SLASH + "all" + SLASH)
public ResponseEntity<FondsHateoas> findAllFonds(
        final UriComponentsBuilder uriBuilder, HttpServletRequest request,
        final HttpServletResponse response,
        @RequestParam(name = "filter", required = false) String filter) {
    Session session = entityManager.unwrap(Session.class);
    FullTextSession fullTextSession = Search.getFullTextSession(session);
    QueryDescriptor query = ElasticsearchQueries.fromQueryString("title:test fonds");
    List<Fonds> result = fullTextSession.createFullTextQuery(query, Fonds.class).list();
    FondsHateoas fondsHateoas = new FondsHateoas((ArrayList<INikitaEntity>) (ArrayList) result);
    fondsHateoasHandler.addLinks(fondsHateoas, request, new Authorisation());
    return ResponseEntity.status(HttpStatus.OK)
            .allow(CommonUtils.WebUtils.getMethodsForRequestOrThrow(request.getServletPath()))
            .body(fondsHateoas);
}
private void handleListIndexing(Collection<? extends DomainObject<?>> list) {
    Session session = getSession();
    if (list == null || session == null) {
        return;
    }
    FullTextSession fts = Search.getFullTextSession(session);
    Transaction tx = fts.beginTransaction();
    for (DomainObject<?> obj : list) {
        if (obj != null) {
            fts.index(obj);
        }
    }
    tx.commit();
}
private void emptyProductIndex() throws Exception {
    runTX(new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            Session session = dm.getSession();
            if (session != null) {
                FullTextSession fullTextSession = Search.getFullTextSession(session);
                fullTextSession.purgeAll(Product.class);
            }
            return null;
        }
    });
}
private void emptySubscriptionIndex() throws Exception {
    runTX(new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            Session session = dm.getSession();
            if (session != null) {
                FullTextSession fullTextSession = Search.getFullTextSession(session);
                fullTextSession.purgeAll(Subscription.class);
            }
            return null;
        }
    });
}
/**
 * Regenerates all the indexed class indexes.
 *
 * @param async true if the reindexing will be done as a background thread
 * @param sess  the hibernate session
 */
public static void reindexAll(boolean async, Session sess) {
    FullTextSession txtSession = Search.getFullTextSession(sess);
    MassIndexer massIndexer = txtSession.createIndexer();
    massIndexer.purgeAllOnStart(true);
    try {
        if (!async) {
            massIndexer.startAndWait();
        } else {
            massIndexer.start();
        }
    } catch (InterruptedException e) {
        log.error("mass reindexing interrupted: " + e.getMessage());
    } finally {
        txtSession.flushToIndexes();
    }
}
/**
 * Test provide and dispose.
 */
@Test
public void testProvideDispose() {
    SessionFactory sessionFactory = locator.getService(SessionFactory.class);
    Session hibernateSession = sessionFactory.openSession();
    FullTextSession ftSession = Search.getFullTextSession(hibernateSession);
    FulltextSearchFactoryFactory factory = new FulltextSearchFactoryFactory(ftSession);

    // Make sure that we can create a search factory.
    SearchFactory searchFactory = factory.provide();
    Assert.assertNotNull(searchFactory);

    // Make sure we can dispose of the factory (does nothing, sadly).
    factory.dispose(searchFactory);

    if (hibernateSession.isOpen()) {
        hibernateSession.close();
    }
}
/**
 * Re-indexes all entries of the given class using the MassIndexer.
 *
 * @param clazz the entity class to re-index
 */
private long reindexMassIndexer(final Class<?> clazz) {
    final Session session = getSession();
    final Criteria criteria = createCriteria(session, clazz, null, true);
    final Long number = (Long) criteria.uniqueResult(); // Get the number of objects to re-index (select count(*) from ...).
    log.info("Starting (mass) re-indexing of " + number + " entries of type " + clazz.getName() + "...");
    final FullTextSession fullTextSession = Search.getFullTextSession(session);
    try {
        fullTextSession.createIndexer(clazz) //
                .batchSizeToLoadObjects(25) //
                // .cacheMode(CacheMode.NORMAL) //
                .threadsToLoadObjects(5) //
                // .threadsForIndexWriter(1) //
                .threadsForSubsequentFetching(20) //
                .startAndWait();
    } catch (final InterruptedException ex) {
        log.error("Exception encountered while reindexing: " + ex.getMessage(), ex);
    }
    final SearchFactory searchFactory = fullTextSession.getSearchFactory();
    searchFactory.optimize(clazz);
    log.info("Re-indexing of " + number + " objects of type " + clazz.getName() + " done.");
    return number;
}
/**
 * This method is for internal use, e.g. for updating objects without an access check.
 *
 * @param obj
 * @return the generated identifier.
 */
@Transactional(readOnly = false, propagation = Propagation.REQUIRES_NEW, isolation = Isolation.REPEATABLE_READ)
public Serializable internalSave(final O obj) {
    Validate.notNull(obj);
    obj.setCreated();
    obj.setLastUpdate();
    onSave(obj);
    onSaveOrModify(obj);
    final Session session = getHibernateTemplate().getSessionFactory().getCurrentSession();
    final Serializable id = session.save(obj);
    log.info("New object added (" + id + "): " + obj.toString());
    prepareHibernateSearch(obj, OperationType.INSERT);
    session.flush();
    Search.getFullTextSession(session).flushToIndexes();
    afterSaveOrModify(obj);
    afterSave(obj);
    return id;
}
@Transactional(readOnly = false, propagation = Propagation.REQUIRES_NEW, isolation = Isolation.REPEATABLE_READ)
public void internalMarkAsDeleted(final O obj) {
    if (obj instanceof Historizable == false) {
        log.error("Object is not historizable. Therefore marking as deleted is not supported. Please use delete instead.");
        throw new InternalErrorException();
    }
    onDelete(obj);
    final O dbObj = getHibernateTemplate().load(clazz, obj.getId(), LockMode.PESSIMISTIC_WRITE);
    onSaveOrModify(obj);
    copyValues(obj, dbObj, "deleted"); // If user has made additional changes.
    dbObj.setDeleted(true);
    dbObj.setLastUpdate();
    final Session session = getHibernateTemplate().getSessionFactory().getCurrentSession();
    session.flush();
    Search.getFullTextSession(session).flushToIndexes();
    afterSaveOrModify(obj);
    afterDelete(obj);
    getSession().flush();
    log.info("Object marked as deleted: " + dbObj.toString());
}
@Transactional(readOnly = false, propagation = Propagation.REQUIRES_NEW, isolation = Isolation.REPEATABLE_READ)
public void internalUndelete(final O obj) {
    final O dbObj = getHibernateTemplate().load(clazz, obj.getId(), LockMode.PESSIMISTIC_WRITE);
    onSaveOrModify(obj);
    copyValues(obj, dbObj, "deleted"); // If user has made additional changes.
    dbObj.setDeleted(false);
    obj.setDeleted(false);
    dbObj.setLastUpdate();
    obj.setLastUpdate(dbObj.getLastUpdate());
    log.info("Object undeleted: " + dbObj.toString());
    final Session session = getHibernateTemplate().getSessionFactory().getCurrentSession();
    session.flush();
    Search.getFullTextSession(session).flushToIndexes();
    afterSaveOrModify(obj);
    afterUndelete(obj);
}
private void handleListIndexing(Collection<? extends DomainObject<?>> list, Session session) {
    if (list == null || session == null) {
        return;
    }
    FullTextSession fts = Search.getFullTextSession(session);
    for (DomainObject<?> obj : list) {
        if (obj != null) {
            fts.index(obj);
        }
    }
}
private void handleObjectIndexing(Object parameter, Session session) {
    if (parameter == null || session == null) {
        return;
    }
    FullTextSession fts = Search.getFullTextSession(session);
    fts.index(parameter);
}
protected void applyQueryImpl(Query query) {
    EntityManager em = EntityManagerFactoryUtils.getTransactionalEntityManager(emf);
    if (em == null) {
        entityManager = emf.createEntityManager();
        em = entityManager;
    }
    FullTextSession fullTextSession = Search.getFullTextSession(em.unwrap(Session.class));
    fullTextQuery = fullTextSession.createFullTextQuery(query, entityClass);
}
@Override @SuppressWarnings("unchecked") public List<Post> search(Paging paging, String q) throws Exception { FullTextSession fullTextSession = Search.getFullTextSession(super.session()); SearchFactory sf = fullTextSession.getSearchFactory(); QueryBuilder qb = sf.buildQueryBuilder().forEntity(PostPO.class).get(); org.apache.lucene.search.Query luceneQuery = qb.keyword().onFields("title","summary","tags").matching(q).createQuery(); FullTextQuery query = fullTextSession.createFullTextQuery(luceneQuery); query.setFirstResult(paging.getFirstResult()); query.setMaxResults(paging.getMaxResults()); StandardAnalyzer standardAnalyzer = new StandardAnalyzer(); SimpleHTMLFormatter formatter = new SimpleHTMLFormatter("<span style='color:red;'>", "</span>"); QueryScorer queryScorer = new QueryScorer(luceneQuery); Highlighter highlighter = new Highlighter(formatter, queryScorer); List<PostPO> list = query.list(); List<Post> rets = new ArrayList<>(list.size()); for (PostPO po : list) { Post m = BeanMapUtils.copy(po, 0); // 处理高亮 String title = highlighter.getBestFragment(standardAnalyzer, "title", m.getTitle()); String summary = highlighter.getBestFragment(standardAnalyzer, "summary", m.getSummary()); if (StringUtils.isNotEmpty(title)) { m.setTitle(title); } if (StringUtils.isNotEmpty(summary)) { m.setSummary(summary); } rets.add(m); } paging.setTotalCount(query.getResultSize()); return rets; }
/**
 * Performs the actual index optimization.
 */
public static void optimizeIndexes() throws Exception {
    FullTextSession ftSession = null;
    Session session = null;
    if (optimizeIndexesRunning) {
        log.warn("*** Optimize indexes already running ***");
    } else {
        optimizeIndexesRunning = true;
        log.debug("*** Begin optimize indexes ***");
        try {
            session = HibernateUtil.getSessionFactory().openSession();
            ftSession = Search.getFullTextSession(session);

            // Optimize indexes
            SearchFactory searchFactory = ftSession.getSearchFactory();
            searchFactory.optimize();
        } catch (Exception e) {
            throw e;
        } finally {
            optimizeIndexesRunning = false;
            HibernateUtil.close(ftSession);
            HibernateUtil.close(session);
        }
        log.debug("*** End optimize indexes ***");
    }
}
@SuppressWarnings("unchecked") public User findUserByPhoneNumber(String phoneNumber) { User user = null; if (!StringUtil.isEmpty(phoneNumber)) { Session session = DBUtil.getFactory().openSession(); FullTextSession fullTextSession = Search.getFullTextSession(session); try { // not necessary if database index is presence already //fullTextSession.createIndexer().startAndWait(); StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_35); BooleanQuery finalQuery = new BooleanQuery(); QueryParser queryParser = new QueryParser(Version.LUCENE_35, UserConstants.DOMAIN_NAME_PHONE, analyzer); Query query = queryParser.parse(phoneNumber); finalQuery.add(query, Occur.MUST); org.hibernate.Query hibQuery = fullTextSession.createFullTextQuery(finalQuery); List<User> result = hibQuery.list(); if (result != null && result.size() > 0) { user = result.get(0); } else { LoggerUtil.info(this.getClass().getName(), "No user with phone number = " + phoneNumber + " can be found."); } } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } finally { fullTextSession.disconnect(); session.disconnect(); } } return user; }
private void handleObjectIndexing(Object parameter) {
    Session session = getSession();
    if (parameter == null || session == null) {
        return;
    }
    FullTextSession fts = Search.getFullTextSession(session);
    Transaction tx = fts.beginTransaction();
    fts.index(parameter);
    tx.commit();
}
@Override
public void rebuildSearchIndexes() {
    // For some reason this generates an exception, something about being unable
    // to synchronize transactions:
    // FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager(em);
    // try
    // {
    //     fullTextEntityManager.createIndexer().startAndWait();
    // }
    // catch (InterruptedException e) { throw new RuntimeException(e); }

    // This alternative code (from the Hibernate Search docs) seems to work,
    // although it generates a warning message when complete. Not going to
    // worry about it; this code doesn't run often.
    final int BATCH_SIZE = 128;
    org.hibernate.Session session = ((EntityManagerImpl) this.em.getDelegate()).getSession();
    FullTextSession fullTextSession = Search.getFullTextSession(session);
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    Transaction transaction = fullTextSession.beginTransaction();

    // Scrollable results will avoid loading too many objects in memory.
    ScrollableResults results = fullTextSession.createCriteria(Mail.class)
            .setFetchSize(BATCH_SIZE)
            .scroll(ScrollMode.FORWARD_ONLY);
    int index = 0;
    while (results.next()) {
        index++;
        fullTextSession.index(results.get(0)); // index each element
        if (index % BATCH_SIZE == 0) {
            fullTextSession.flushToIndexes(); // apply changes to indexes
            fullTextSession.clear();          // free memory since the queue is processed
        }
    }
    transaction.commit();
}
/**
 * {@inheritDoc}
 */
public List<T> search(String searchTerm) throws SearchException {
    Session sess = getSession();
    FullTextSession txtSession = Search.getFullTextSession(sess);

    org.apache.lucene.search.Query qry;
    try {
        qry = HibernateSearchTools.generateQuery(searchTerm, this.persistentClass, sess, defaultAnalyzer);
    } catch (ParseException ex) {
        throw new SearchException(ex);
    }
    org.hibernate.search.FullTextQuery hibQuery = txtSession.createFullTextQuery(qry, this.persistentClass);
    return hibQuery.list();
}
/**
 * Regenerates the index for a given class.
 *
 * @param clazz the class
 * @param sess  the hibernate session
 */
public static void reindex(Class clazz, Session sess) {
    FullTextSession txtSession = Search.getFullTextSession(sess);
    MassIndexer massIndexer = txtSession.createIndexer(clazz);
    try {
        massIndexer.startAndWait();
    } catch (InterruptedException e) {
        log.error("mass reindexing interrupted: " + e.getMessage());
    } finally {
        txtSession.flushToIndexes();
    }
}
@SuppressWarnings("unchecked") @DataProvider public List<SearchResult> search(String searchText) throws Exception { FullTextSession fullTextSession = Search.getFullTextSession(exampleDao .getSession()); if (!indexRebuilded) { indexRebuilded = true; fullTextSession.createIndexer().startAndWait(); } MultiFieldQueryParser parser = new MultiFieldQueryParser( Version.LUCENE_31, new String[] { "label", "tags", "url", "summary" }, new StandardAnalyzer(Version.LUCENE_31)); org.apache.lucene.search.Query luceneQuery = parser.parse(searchText); FullTextQuery query = fullTextSession.createFullTextQuery(luceneQuery, Example.class); query.setFirstResult(0); query.setMaxResults(100); List<SearchResult> searchResults = new ArrayList<SearchResult>(); for (Example example : (List<Example>) query.list()) { SearchResult searchResult = new SearchResult(); PropertyUtils.copyProperties(searchResult, example); searchResults.add(searchResult); } return searchResults; }
@Override
@Transactional
public void reindex() throws Exception {
    if (this.entityMetadata.isIndexed()) {
        Search.getFullTextSession(this.buildSession()).createIndexer(this.entityImplClass).startAndWait();
    }
}
private FullTextQuery createQuery(String criteria) throws ParseException {
    // FIXME: Should not hardcode the analyzer
    QueryParser parser = new QueryParser("text", new StandardAnalyzer());
    Query luceneQuery = parser.parse(criteria);
    return Search.createFullTextSession(session).createFullTextQuery(luceneQuery, Post.class);
}
/**
 * Shows the main statistics page.
 */
public void list() {
    IndexReader indexReader = null;
    ReaderProvider readerProvider = null;
    try {
        SearchFactory searchFactory = Search.createFullTextSession(
                this.sessionFactory.getCurrentSession()).getSearchFactory();
        DirectoryProvider<?> directoryProvider = searchFactory.getDirectoryProviders(Post.class)[0];
        readerProvider = searchFactory.getReaderProvider();
        indexReader = readerProvider.openReader(directoryProvider);

        String indexDirectory = directoryProvider.getDirectory().toString();
        indexDirectory = indexDirectory.substring(indexDirectory.indexOf('@') + 1);

        boolean indexExists = IndexReader.indexExists(indexDirectory);
        this.result.include("indexExists", indexExists);
        if (indexExists) {
            this.result.include("numberOfDocs", indexReader.numDocs());
            this.result.include("indexLocation", indexDirectory);
            this.result.include("totalMessages", this.forumRepository.getTotalMessages());
            this.result.include("isLocked", IndexReader.isLocked(indexDirectory));
            this.result.include("lastModified", new Date(IndexReader.lastModified(indexDirectory)));
        }
    } catch (IOException e) {
        throw new ForumException(e);
    } finally {
        if (readerProvider != null && indexReader != null) {
            readerProvider.closeReader(indexReader);
        }
    }
}
/**
 * Rebuild the search index during context initialization.
 *
 * @param servletContextEvent The context event (not really used).
 */
@Override
public void contextInitialized(final ServletContextEvent servletContextEvent) {
    logger.info("Rebuilding Search Index...");

    // Build the session factory.
    SessionFactory factory = createSessionFactory();

    // Build the hibernate session.
    Session session = factory.openSession();

    // Create the fulltext session.
    FullTextSession fullTextSession = Search.getFullTextSession(session);

    try {
        fullTextSession
                .createIndexer()
                .startAndWait();
    } catch (InterruptedException e) {
        logger.warn("Search reindex interrupted. Good luck!");
        logger.trace("Error:", e);
    } finally {
        // Close everything and release the lock file.
        session.close();
        factory.close();
    }
}
/**
 * Assert that the index is created on startup.
 *
 * @throws Exception Any unexpected exceptions.
 */
@Test
@PrepareForTest({Search.class, SearchIndexContextListener.class})
public void testOnStartup() throws Exception {
    // Set up a fake session factory
    SessionFactory mockFactory = mock(SessionFactory.class);
    Session mockSession = mock(Session.class);
    when(mockFactory.openSession()).thenReturn(mockSession);

    // Set up a fake indexer
    MassIndexer mockIndexer = mock(MassIndexer.class);

    // Set up our fulltext session.
    FullTextSession mockFtSession = mock(FullTextSession.class);
    when(mockFtSession.createIndexer())
            .thenReturn(mockIndexer);

    // This is the way to tell PowerMock to mock all static methods of a
    // given class
    mockStatic(Search.class);
    when(Search.getFullTextSession(mockSession))
            .thenReturn(mockFtSession);

    SearchIndexContextListener listener = mock(SearchIndexContextListener.class);
    doReturn(mockFactory).when(listener).createSessionFactory();
    doCallRealMethod().when(listener).contextInitialized(any());

    // Run the test
    listener.contextInitialized(mock(ServletContextEvent.class));
    verify(mockIndexer).startAndWait();

    // Verify that the session was closed.
    verify(mockSession).close();
}
/**
 * Assert that an interrupted exception exits cleanly.
 *
 * @throws Exception An exception that might be thrown.
 */
@Test
@PrepareForTest({Search.class, SearchIndexContextListener.class})
public void testInterruptedIndex() throws Exception {
    // Set up a fake session factory
    SessionFactory mockFactory = mock(SessionFactory.class);
    Session mockSession = mock(Session.class);
    when(mockFactory.openSession()).thenReturn(mockSession);

    // Set up a fake indexer
    MassIndexer mockIndexer = mock(MassIndexer.class);

    // Set up our fulltext session.
    FullTextSession mockFtSession = mock(FullTextSession.class);
    when(mockFtSession.createIndexer())
            .thenReturn(mockIndexer);
    doThrow(new InterruptedException())
            .when(mockIndexer)
            .startAndWait();

    // This is the way to tell PowerMock to mock all static methods of a
    // given class
    mockStatic(Search.class);
    when(Search.getFullTextSession(mockSession))
            .thenReturn(mockFtSession);

    SearchIndexContextListener listener = mock(SearchIndexContextListener.class);
    doReturn(mockFactory).when(listener).createSessionFactory();
    doCallRealMethod().when(listener).contextInitialized(any());

    // Run the test
    listener.contextInitialized(mock(ServletContextEvent.class));

    // Verify that the session was closed.
    verify(mockSession).close();
    verify(mockFactory).close();
}
@Override
public void afterPropertiesSet() throws Exception {
    // Rebuild the index.
    FullTextSession fullTextSession = Search.getFullTextSession(sessionFactory.openSession());
    fullTextSession.createIndexer().startAndWait();
}
@Override
public synchronized final FullTextSession getFullTextSession() {
    FullTextSession fts = Local.get(SearchDao.FULL_TEXT_SESSION_KEY, FullTextSession.class);
    if (fts == null || !fts.isOpen()) {
        fts = Search.getFullTextSession(getSession());
        Local.put(SearchDao.FULL_TEXT_SESSION_KEY, fts);
        log.debug("Created a new FullTextSession.");
    }
    return fts;
}
@Override
public synchronized final FullTextSession getFullTextSession() {
    FullTextSession fts = Local.get(FULL_TEXT_SESSION_KEY, FullTextSession.class);
    // If the current thread has no FullTextSession, or the existing one has already been closed
    if (fts == null || !fts.isOpen()) {
        fts = Search.getFullTextSession(getSession());
        Local.put(FULL_TEXT_SESSION_KEY, fts);
        log.debug("Created a new FullTextSession.");
    }
    return fts;
}
@Override
public synchronized final FullTextSession getFullTextSession() {
    FullTextSession fts = Local.get(IHibernateOgmDao.FULL_TEXT_SESSION_KEY, FullTextSession.class);
    if (fts == null || !fts.isOpen()) {
        fts = Search.getFullTextSession(getSession());
        Local.put(IHibernateOgmDao.FULL_TEXT_SESSION_KEY, fts);
        log.debug("Created a new FullTextSession.");
    }
    return fts;
}
@Test
public void hibernateSearchUsage() throws Exception {
    final Session session = openSession();
    final FullTextSession fts = Search.getFullTextSession(session);

    Transaction transaction = fts.beginTransaction();
    int itemCount = 10000;
    for (int i = 0; i < itemCount; i++) {
        Insurance insurance = new Insurance();
        insurance.setName("Macif");
        fts.persist(insurance);
    }
    transaction.commit();
    fts.clear();

    transaction = fts.beginTransaction();
    final QueryBuilder b = fts.getSearchFactory()
            .buildQueryBuilder()
            .forEntity(Insurance.class)
            .get();
    final Query lq = b.keyword().onField("name").matching("Macif").createQuery();
    final FullTextQuery ftq = fts.createFullTextQuery(lq, Insurance.class);
    ftq.initializeObjectsWith(ObjectLookupMethod.SKIP, DatabaseRetrievalMethod.FIND_BY_ID);
    final List<Insurance> results = ftq.list();
    Assertions.assertThat(results).hasSize(itemCount);

    for (Insurance o : results) {
        fts.delete(o);
    }
    transaction.commit();
    fts.close();
}
private void purgeAll(Class<?> entityType) {
    FullTextSession session = Search.getFullTextSession(openSession());
    session.purgeAll(entityType);
    session.flushToIndexes();
    @SuppressWarnings("unchecked")
    List<Insurance> list = session.createQuery("FROM " + entityType.getSimpleName()).list();
    assertThat(list).hasSize(0);
}
protected SearchFactoryImplementor getSearchFactoryImpl() {
    if (log.isDebugEnabled())
        log.debug("Creating a SearchFactoryImplementor.");

    FullTextSession fts = Search.getFullTextSession(openSession());
    // Obtain the SearchFactory before closing the temporary session.
    SearchFactory searchFactory = fts.getSearchFactory();
    fts.close();
    return (SearchFactoryImplementor) searchFactory;
}
private void reindexDependents(final HibernateTemplate hibernateTemplate, final Session session,
        final BaseDO<?> obj, final Set<String> alreadyReindexed) {
    if (alreadyReindexed.contains(getReindexId(obj)) == true) {
        if (log.isDebugEnabled() == true) {
            log.debug("Object already re-indexed (skipping): " + getReindexId(obj));
        }
        return;
    }
    session.flush(); // Needed to flush the object changes!
    final FullTextSession fullTextSession = Search.getFullTextSession(session);
    fullTextSession.setFlushMode(FlushMode.AUTO);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    try {
        BaseDO<?> dbObj = (BaseDO<?>) session.get(obj.getClass(), obj.getId());
        if (dbObj == null) {
            dbObj = (BaseDO<?>) session.load(obj.getClass(), obj.getId());
        }
        fullTextSession.index(dbObj);
        alreadyReindexed.add(getReindexId(dbObj));
        if (log.isDebugEnabled() == true) {
            log.debug("Object added to index: " + getReindexId(dbObj));
        }
    } catch (final Exception ex) {
        // Don't fail if any exception occurs while re-indexing.
        log.info("Failed to re-index " + obj.getClass() + ": " + ex.getMessage());
    }
    // session.flush(); // clear every batchSize since the queue is processed
    final List<Entry> entryList = map.get(obj.getClass());
    reindexDependents(hibernateTemplate, session, obj, entryList, alreadyReindexed);
}