fix #53
parent 9c114760b0
commit 7063263397
5 changed files with 45 additions and 35 deletions
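The change, as the diff below shows it: CrawlingSessionHelper.store() now takes a boolean create flag, so a crawl inserts a new CrawlingSession record when it starts and updates the latest record for that session id when it finishes. CrawlingSessionService gains getLast(sessionId) and getLastCrawlingSessionInfoList(sessionId), getCrawlingSessionInfoList() now looks rows up by the numeric id, and getExpiredSessionIdList() is replaced by getLast(). CrawlJob no longer stores the crawling session itself before launching the crawler, and the admin CrawlingSessionAction resolves info items through the form's numeric id.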
CrawlingSessionAction.java

@@ -16,7 +16,7 @@
 
 package jp.sf.fess.action.admin;
 
-import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 
 import javax.annotation.Resource;
@@ -40,11 +40,11 @@ public class CrawlingSessionAction extends BsCrawlingSessionAction {
     }
 
     public List<CrawlingSessionInfo> getCrawlingSessionInfoItems() {
-        if (crawlingSessionForm.sessionId != null) {
-            return crawlingSessionService
-                    .getCrawlingSessionInfoList(crawlingSessionForm.sessionId);
+        if (crawlingSessionForm.id != null) {
+            return crawlingSessionService.getCrawlingSessionInfoList(Long
+                    .parseLong(crawlingSessionForm.id));
         }
-        return new ArrayList<CrawlingSessionInfo>(0);
+        return Collections.emptyList();
     }
 
     @Execute(validator = false, input = "error.jsp")
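With this, the admin action reads crawlingSessionForm.id and parses it with Long.parseLong, matching the new getCrawlingSessionInfoList(Long) signature in CrawlingSessionService below; when no id is given it returns Collections.emptyList() instead of allocating an empty ArrayList.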
Crawler.java

@@ -288,7 +288,7 @@ public class Crawler implements Serializable {
        }

        try {
-            crawlingSessionHelper.store(options.sessionId);
+            crawlingSessionHelper.store(options.sessionId, true);
            final String dayForCleanupStr;
            if (StringUtil.isNotBlank(options.expires)) {
                dayForCleanupStr = options.expires;
@@ -311,7 +311,7 @@ public class Crawler implements Serializable {
            return crawler.doCrawl(options);
        } finally {
            try {
-                crawlingSessionHelper.store(options.sessionId);
+                crawlingSessionHelper.store(options.sessionId, false);
            } catch (final Exception e) {
                logger.warn("Failed to store crawling information.", e);
            }
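In Crawler, the session record is now written at both ends of a crawl: store(options.sessionId, true) before the crawl inserts a fresh CrawlingSession, and store(options.sessionId, false) in the finally block updates the latest record for that session id once the crawl ends (see the CrawlingSessionHelper hunks below).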
CrawlingSessionHelper.java

@@ -40,8 +40,12 @@ import org.apache.solr.client.solrj.response.QueryResponse;
 import org.codelibs.solr.lib.SolrGroup;
 import org.seasar.framework.container.SingletonS2Container;
 import org.seasar.framework.util.StringUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class CrawlingSessionHelper implements Serializable {
+    private static final Logger logger = LoggerFactory
+            .getLogger(CrawlingSessionHelper.class);
 
     public static final String FACET_COUNT_KEY = "count";
 
@@ -67,9 +71,9 @@ public class CrawlingSessionHelper implements Serializable {
         return sessionId;
     }
 
-    public synchronized void store(final String sessionId) {
-        CrawlingSession crawlingSession = getCrawlingSessionService().get(
-                sessionId);
+    public synchronized void store(final String sessionId, final boolean create) {
+        CrawlingSession crawlingSession = create ? null
+                : getCrawlingSessionService().getLast(sessionId);
         if (crawlingSession == null) {
             crawlingSession = new CrawlingSession(sessionId);
             try {
@@ -105,10 +109,11 @@ public class CrawlingSessionHelper implements Serializable {
 
     public void updateParams(final String sessionId, final String name,
             final int dayForCleanup) {
-        CrawlingSession crawlingSession = getCrawlingSessionService().get(
-                sessionId);
+        final CrawlingSession crawlingSession = getCrawlingSessionService()
+                .getLast(sessionId);
         if (crawlingSession == null) {
-            crawlingSession = new CrawlingSession(sessionId);
+            logger.warn("No crawling session: " + sessionId);
+            return;
         }
         if (StringUtil.isNotBlank(name)) {
             crawlingSession.setName(name);
@@ -138,7 +143,7 @@ public class CrawlingSessionHelper implements Serializable {
 
     public Map<String, String> getInfoMap(final String sessionId) {
         final List<CrawlingSessionInfo> crawlingSessionInfoList = getCrawlingSessionService()
-                .getCrawlingSessionInfoList(sessionId);
+                .getLastCrawlingSessionInfoList(sessionId);
         final Map<String, String> map = new HashMap<String, String>();
         for (final CrawlingSessionInfo crawlingSessionInfo : crawlingSessionInfoList) {
             map.put(crawlingSessionInfo.getKey(),
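Taken together, the Crawler and helper hunks imply the call pattern sketched below. This is a minimal sketch rather than code from the commit: the class and method names (CrawlStoreSketch, runCrawl) are made up, and it assumes the Fess classes shown in the diff are on the classpath with the helper registered under the crawlingSessionHelper component name, as in the existing CrawlJob code.

import jp.sf.fess.helper.CrawlingSessionHelper;

import org.seasar.framework.container.SingletonS2Container;

public class CrawlStoreSketch {

    public void runCrawl(final String sessionId) {
        // Same component lookup style as the existing CrawlJob/Crawler code.
        final CrawlingSessionHelper crawlingSessionHelper = SingletonS2Container
                .getComponent("crawlingSessionHelper");
        // create == true: always insert a new CrawlingSession for this sessionId.
        crawlingSessionHelper.store(sessionId, true);
        try {
            // ... run the crawl ...
        } finally {
            // create == false: update the latest CrawlingSession for this
            // sessionId, looked up via CrawlingSessionService.getLast().
            crawlingSessionHelper.store(sessionId, false);
        }
    }
}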
CrawlJob.java

@@ -21,19 +21,13 @@ import java.util.Date
 
 import jp.sf.fess.Constants;
 import jp.sf.fess.FessSystemException;
-import jp.sf.fess.helper.CrawlingSessionHelper;
 import jp.sf.fess.helper.SystemHelper;
 import jp.sf.fess.job.JobExecutor.ShutdownListener;
 
 import org.seasar.framework.container.SingletonS2Container;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class CrawlJob {
 
-    private static final Logger logger = LoggerFactory
-            .getLogger(CrawlJob.class);
-
     protected int documentExpires = -2;
 
     public String execute(final JobExecutor jobExecutor) {
@@ -98,11 +92,7 @@ public class CrawlJob {
            });
        }
 
-        // store crawling session
-        final CrawlingSessionHelper crawlingSessionHelper = SingletonS2Container
-                .getComponent("crawlingSessionHelper");
        try {
-            crawlingSessionHelper.store(sessionId);
            SingletonS2Container.getComponent(SystemHelper.class)
                    .executeCrawler(sessionId, webConfigIds, fileConfigIds,
                            dataConfigIds, operation, documentExpires);
CrawlingSessionService.java

@@ -22,6 +22,7 @@ import java.io.Serializable
 import java.io.Writer;
 import java.sql.Timestamp;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Date;
 import java.util.List;
 import java.util.Set;
@@ -46,6 +47,7 @@ import org.apache.commons.logging.LogFactory;
 import org.codelibs.core.CoreLibConstants;
 import org.seasar.dbflute.bhv.DeleteOption;
 import org.seasar.dbflute.cbean.EntityRowHandler;
+import org.seasar.dbflute.cbean.ListResultBean;
 import org.seasar.dbflute.cbean.coption.LikeSearchOption;
 import org.seasar.framework.util.StringUtil;
 
@@ -144,10 +146,21 @@ public class CrawlingSessionService extends BsCrawlingSessionService implements
         crawlingSessionInfoBhv.batchInsert(crawlingSessionInfoList);
     }
 
-    public List<CrawlingSessionInfo> getCrawlingSessionInfoList(
-            final String sessionId) {
+    public List<CrawlingSessionInfo> getCrawlingSessionInfoList(final Long id) {
         final CrawlingSessionInfoCB cb = new CrawlingSessionInfoCB();
-        cb.query().queryCrawlingSession().setSessionId_Equal(sessionId);
+        cb.query().queryCrawlingSession().setId_Equal(id);
         cb.query().addOrderBy_Id_Asc();
         return crawlingSessionInfoBhv.selectList(cb);
     }
 
+    public List<CrawlingSessionInfo> getLastCrawlingSessionInfoList(
+            final String sessionId) {
+        final CrawlingSession crawlingSession = getLast(sessionId);
+        if (crawlingSession == null) {
+            return Collections.emptyList();
+        }
+        final CrawlingSessionInfoCB cb = new CrawlingSessionInfoCB();
+        cb.query().setCrawlingSessionId_Equal(crawlingSession.getId());
+        cb.query().addOrderBy_Id_Asc();
+        return crawlingSessionInfoBhv.selectList(cb);
+    }
+
@@ -212,6 +225,7 @@ public class CrawlingSessionService extends BsCrawlingSessionService implements
         final CsvConfig cfg = new CsvConfig(',', '"', '"');
         cfg.setEscapeDisabled(false);
         cfg.setQuoteDisabled(false);
+        @SuppressWarnings("resource")
         final CsvWriter csvWriter = new CsvWriter(writer, cfg);
         final CrawlingSessionInfoCB cb = new CrawlingSessionInfoCB();
         cb.setupSelect_CrawlingSession();
@@ -278,16 +292,17 @@ public class CrawlingSessionService extends BsCrawlingSessionService implements
                 new DeleteOption<CrawlingSessionCB>().allowNonQueryDelete());
     }
 
-    public List<String> getExpiredSessionIdList(
-            final List<String> expiredSessionIdList) {
+    public CrawlingSession getLast(final String sessionId) {
         final CrawlingSessionCB cb = new CrawlingSessionCB();
-        cb.query().setSessionId_InScope(expiredSessionIdList);
-        cb.specify().columnSessionId();
-        final List<CrawlingSession> list = crawlingSessionBhv.selectList(cb);
-        for (final CrawlingSession crawlingSession : list) {
-            expiredSessionIdList.remove(crawlingSession.getSessionId());
+        cb.query().setSessionId_Equal(sessionId);
+        cb.query().addOrderBy_CreatedTime_Desc();
+        cb.fetchFirst(1);
+        final ListResultBean<CrawlingSession> list = crawlingSessionBhv
+                .selectList(cb);
+        if (list.isEmpty()) {
+            return null;
         }
-        return expiredSessionIdList;
+        return list.get(0);
     }
 
 }
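And a rough usage sketch for the new service methods, again not part of the commit: the class name is made up, constructor injection stands in for Fess's usual @Resource wiring, and the entity/service import paths are assumed from the usual jp.sf.fess package layout.

import java.util.Collections;
import java.util.List;

import jp.sf.fess.db.exentity.CrawlingSession;
import jp.sf.fess.db.exentity.CrawlingSessionInfo;
import jp.sf.fess.service.CrawlingSessionService;

public class CrawlingSessionLookupSketch {

    private final CrawlingSessionService crawlingSessionService;

    public CrawlingSessionLookupSketch(
            final CrawlingSessionService crawlingSessionService) {
        this.crawlingSessionService = crawlingSessionService;
    }

    // getLast() returns the most recently created CrawlingSession for a
    // sessionId (ordered by CreatedTime descending, fetchFirst(1)), so the
    // same sessionId may now have several rows across crawls.
    public List<CrawlingSessionInfo> latestRunInfo(final String sessionId) {
        final CrawlingSession last = crawlingSessionService.getLast(sessionId);
        if (last == null) {
            return Collections.emptyList();
        }
        // Info rows are now fetched by the numeric CrawlingSession id.
        return crawlingSessionService.getCrawlingSessionInfoList(last.getId());
    }

    // Roughly the same lookup in one call, as added by this commit.
    public List<CrawlingSessionInfo> latestRunInfoShort(final String sessionId) {
        return crawlingSessionService.getLastCrawlingSessionInfoList(sessionId);
    }
}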