modify notification mail and minor fixes

Shinsuke Sugaya 2015-12-30 17:35:41 +09:00
parent 5adaf030a3
commit 1f45f49a69
12 changed files with 82 additions and 87 deletions

EsApiManager.java

@@ -66,8 +66,7 @@ public class EsApiManager extends BaseApiManager {
         final String servletPath = request.getServletPath();
         if (servletPath.startsWith(pathPrefix)) {
             final RequestManager requestManager = ComponentUtil.getRequestManager();
-            return requestManager.findUserBean(FessUserBean.class).map(user -> user.hasRoles(acceptedRoles)).orElseGet(() -> Boolean.FALSE)
-                    .booleanValue();
+            return requestManager.findUserBean(FessUserBean.class).map(user -> user.hasRoles(acceptedRoles)).orElse(Boolean.FALSE);
         }
         return false;
     }
@@ -138,9 +137,7 @@ public class EsApiManager extends BaseApiManager {
     public String getServerPath() {
         return getSessionManager().getAttribute(Constants.ES_API_ACCESS_TOKEN, String.class).map(token -> ADMIN_SERVER + token)
-                .orElseGet(() -> {
-                    throw new FessSystemException("Cannot create an access token.");
-                });
+                .orElseThrow(() -> new FessSystemException("Cannot create an access token."));
     }

     public void saveToken() {
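
Note: the two hunks above replace orElseGet with the plainer orElse and orElseThrow. A minimal sketch of the difference between the three accessors, assuming only plain java.util.Optional (the class and values below are illustrative, not part of the commit):

import java.util.Optional;

public class OptionalFallbackSketch {
    public static void main(String[] args) {
        final Optional<String> token = Optional.empty();

        // orElse evaluates its argument eagerly; fine for constants such as Boolean.FALSE.
        System.out.println(token.orElse("fallback"));

        // orElseGet defers building the fallback until it is actually needed.
        System.out.println(token.orElseGet(() -> "fallback"));

        // orElseThrow states "absent is an error" directly, replacing the old
        // pattern of throwing from inside an orElseGet lambda.
        try {
            token.orElseThrow(() -> new IllegalStateException("Cannot create an access token."));
        } catch (final IllegalStateException e) {
            System.out.println(e.getMessage());
        }
    }
}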

CrawlingInfoService.java

@@ -42,6 +42,7 @@ import org.codelibs.fess.es.config.exbhv.CrawlingInfoParamBhv;
 import org.codelibs.fess.es.config.exentity.CrawlingInfo;
 import org.codelibs.fess.es.config.exentity.CrawlingInfoParam;
 import org.codelibs.fess.exception.FessSystemException;
+import org.codelibs.fess.mylasta.direction.FessConfig;
 import org.codelibs.fess.util.ComponentUtil;
 import org.dbflute.bhv.readable.EntityRowHandler;
 import org.dbflute.cbean.result.ListResultBean;
@@ -196,9 +197,11 @@ public class CrawlingInfoService implements Serializable {
         if (crawlingInfo == null) {
             return Collections.emptyList();
         }
+        final FessConfig fessConfig = ComponentUtil.getFessConfig();
         return crawlingInfoParamBhv.selectList(cb -> {
             cb.query().setCrawlingInfoId_Equal(crawlingInfo.getId());
             cb.query().addOrderBy_Id_Asc();
+            cb.paging(fessConfig.getPageCrawlingInfoParamMaxFetchSizeAsInteger().intValue(), 1);
         });
     }
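
Note: cb.paging(pageSize, pageNumber) is DBFlute's paging call, so the hunk above caps a previously unbounded selectList at the new fetch-size property. A minimal sketch of the pattern with the same names (the surrounding scaffolding is assumed, not shown in the diff):

// Sketch: bound a DBFlute list query by a configured fetch size.
final FessConfig fessConfig = ComponentUtil.getFessConfig();
final int fetchSize = fessConfig.getPageCrawlingInfoParamMaxFetchSizeAsInteger().intValue(); // e.g. 100
final List<CrawlingInfoParam> params = crawlingInfoParamBhv.selectList(cb -> {
    cb.query().setCrawlingInfoId_Equal(crawlingInfo.getId());
    cb.query().addOrderBy_Id_Asc();
    cb.paging(fetchSize, 1); // at most fetchSize rows, first page
});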

KuromojiService.java

@@ -44,7 +44,7 @@ public class KuromojiService {
             kuromojiPager.setPageNumberList(kuromojiList.createPageNumberList());
             return (List<KuromojiItem>) kuromojiList;
-        }).orElseGet(() -> Collections.emptyList());
+        }).orElse(Collections.emptyList());
     }

     public OptionalEntity<KuromojiFile> getKuromojiFile(final String dictId) {

SynonymService.java

@@ -44,7 +44,7 @@ public class SynonymService {
             synonymPager.setPageNumberList(synonymList.createPageNumberList());
             return (List<SynonymItem>) synonymList;
-        }).orElseGet(() -> Collections.emptyList());
+        }).orElse(Collections.emptyList());
     }

     public OptionalEntity<SynonymFile> getSynonymFile(final String dictId) {

Crawler.java

@@ -286,6 +286,8 @@ public class Crawler implements Serializable {
             // ignore
         }
+        logger.debug("\ninfoMap: {}\ndataMap: {}", infoMap, dataMap);
+        final FessConfig fessConfig = ComponentUtil.getFessConfig();
         final Postbox postbox = ComponentUtil.getComponent(Postbox.class);
         CrawlerPostcard.droppedInto(postbox, postcard -> {
@@ -294,24 +296,21 @@ public class Crawler implements Serializable {
             StreamUtil.of(toAddresses).forEach(address -> {
                 postcard.addTo(address);
             });
-            postcard.setCommitEndTime(getValueOrEmpty(dataMap, "commitEndTime"));
-            postcard.setCommitExecTime(getValueOrEmpty(dataMap, "commitExecTime"));
-            postcard.setCommitStartTime(getValueOrEmpty(dataMap, "commitStartTime"));
-            postcard.setCrawlerEndTime(getValueOrEmpty(dataMap, "crawlerEndTime"));
-            postcard.setCrawlerExecTime(getValueOrEmpty(dataMap, "crawlerExecTime"));
-            postcard.setCrawlerStartTime(getValueOrEmpty(dataMap, "crawlerStartTime"));
-            postcard.setDataCrawlEndTime(getValueOrEmpty(dataMap, "dataCrawlEndTime"));
-            postcard.setDataCrawlExecTime(getValueOrEmpty(dataMap, "dataCrawlExecTime"));
-            postcard.setDataCrawlStartTime(getValueOrEmpty(dataMap, "dataCrawlStartTime"));
-            postcard.setDataFsIndexSize(getValueOrEmpty(dataMap, "dataFsIndexSize"));
-            postcard.setDataIndexExecTime(getValueOrEmpty(dataMap, "dataIndexExecTime"));
-            postcard.setHostname(getValueOrEmpty(dataMap, "hostname"));
-            postcard.setWebFsCrawlEndTime(getValueOrEmpty(dataMap, "webFsCrawlEndTime"));
-            postcard.setWebFsCrawlExecTime(getValueOrEmpty(dataMap, "webFsCrawlExecTime"));
-            postcard.setWebFsCrawlStartTime(getValueOrEmpty(dataMap, "webFsCrawlStartTime"));
-            postcard.setWebFsIndexExecTime(getValueOrEmpty(dataMap, "webFsIndexExecTime"));
-            postcard.setWebFsIndexSize(getValueOrEmpty(dataMap, "webFsIndexSize"));
-            if (Constants.T.equals(infoMap.get(Constants.CRAWLER_STATUS))) {
+            postcard.setCrawlerEndTime(getValueFromMap(dataMap, "crawlerEndTime", StringUtil.EMPTY));
+            postcard.setCrawlerExecTime(getValueFromMap(dataMap, "crawlerExecTime", "0"));
+            postcard.setCrawlerStartTime(getValueFromMap(dataMap, "crawlerStartTime", StringUtil.EMPTY));
+            postcard.setDataCrawlEndTime(getValueFromMap(dataMap, "dataCrawlEndTime", StringUtil.EMPTY));
+            postcard.setDataCrawlExecTime(getValueFromMap(dataMap, "dataCrawlExecTime", "0"));
+            postcard.setDataCrawlStartTime(getValueFromMap(dataMap, "dataCrawlStartTime", StringUtil.EMPTY));
+            postcard.setDataFsIndexSize(getValueFromMap(dataMap, "dataFsIndexSize", "0"));
+            postcard.setDataIndexExecTime(getValueFromMap(dataMap, "dataIndexExecTime", "0"));
+            postcard.setHostname(getValueFromMap(dataMap, "hostname", StringUtil.EMPTY));
+            postcard.setWebFsCrawlEndTime(getValueFromMap(dataMap, "webFsCrawlEndTime", StringUtil.EMPTY));
+            postcard.setWebFsCrawlExecTime(getValueFromMap(dataMap, "webFsCrawlExecTime", "0"));
+            postcard.setWebFsCrawlStartTime(getValueFromMap(dataMap, "webFsCrawlStartTime", StringUtil.EMPTY));
+            postcard.setWebFsIndexExecTime(getValueFromMap(dataMap, "webFsIndexExecTime", "0"));
+            postcard.setWebFsIndexSize(getValueFromMap(dataMap, "webFsIndexSize", "0"));
+            if (Constants.TRUE.equalsIgnoreCase(infoMap.get(Constants.CRAWLER_STATUS))) {
                 postcard.setStatus(Constants.OK);
             } else {
                 postcard.setStatus(Constants.FAIL);
@@ -320,10 +319,10 @@ public class Crawler implements Serializable {
         }
     }

-    private String getValueOrEmpty(Map<String, String> dataMap, String key) {
+    private String getValueFromMap(Map<String, String> dataMap, String key, String defaultValue) {
         String value = dataMap.get(key);
-        if (value == null) {
-            return StringUtil.EMPTY;
+        if (StringUtil.isBlank(value)) {
+            return defaultValue;
         }
         return value;
     }
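
Note: unlike Map.getOrDefault, the new getValueFromMap also falls back when a stored value is empty or whitespace-only, because StringUtil.isBlank treats both as missing. A standalone sketch of the same behavior (class name hypothetical; trim().isEmpty() stands in for StringUtil.isBlank):

import java.util.HashMap;
import java.util.Map;

public class BlankSafeLookupSketch {
    // Returns defaultValue for absent, null, empty, or whitespace-only values.
    static String getValueFromMap(final Map<String, String> dataMap, final String key, final String defaultValue) {
        final String value = dataMap.get(key);
        return value == null || value.trim().isEmpty() ? defaultValue : value;
    }

    public static void main(String[] args) {
        final Map<String, String> dataMap = new HashMap<>();
        dataMap.put("crawlerExecTime", " ");
        System.out.println(getValueFromMap(dataMap, "crawlerExecTime", "0")); // "0": blank counts as missing
        System.out.println(dataMap.getOrDefault("crawlerExecTime", "0"));     // " ": getOrDefault keeps blanks
    }
}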

CrawlingInfoHelper.java

@@ -99,6 +99,7 @@ public class CrawlingInfoHelper implements Serializable {
         if (infoMap == null) {
             infoMap = Collections.synchronizedMap(new LinkedHashMap<String, String>());
         }
+        logger.debug("infoMap: {}={} => {}", key, value, infoMap);
         infoMap.put(key, value);
     }
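
Note: the added debug line uses placeholder-style logging, so the message is only built when DEBUG is enabled. A minimal sketch, assuming an SLF4J-style logger (the wiring below is illustrative, not from the commit):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DebugLogSketch {
    private static final Logger logger = LoggerFactory.getLogger(DebugLogSketch.class);

    public static void main(String[] args) {
        // Each {} placeholder is substituted lazily; no string
        // concatenation happens when DEBUG is disabled.
        logger.debug("infoMap: {}={} => {}", "key", "value", "{key=value}");
    }
}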

IndexingHelper.java

@@ -115,7 +115,7 @@ public class IndexingHelper {
         return fessEsClient.getDocument(fessConfig.getIndexDocumentSearchIndex(), fessConfig.getIndexDocumentType(), id,
                 requestBuilder -> {
                     return true;
-                }).orElseGet(() -> null);
+                }).orElse(null);
     }

     public List<Map<String, Object>> getDocumentListByPrefixId(final FessEsClient fessEsClient, final String id, final String[] fields) {

SystemHelper.java

@@ -181,7 +181,7 @@ public class SystemHelper implements Serializable {
                 }
             }
             return getDefaultHelpLink(url);
-        }).orElseGet(() -> getDefaultHelpLink(url));
+        }).orElse(getDefaultHelpLink(url));
     }

     private String getDefaultHelpLink(final String url) {

FessConfig.java

@@ -318,6 +318,9 @@ public interface FessConfig extends FessEnv, org.codelibs.fess.mylasta.direction
     /** The key of the configuration. e.g. 1000 */
     String PAGE_GROUP_MAX_FETCH_SIZE = "page.group.max.fetch.size";

+    /** The key of the configuration. e.g. 100 */
+    String PAGE_CRAWLING_INFO_PARAM_MAX_FETCH_SIZE = "page.crawling.info.param.max.fetch.size";
+
     /** The key of the configuration. e.g. 0 */
     String PAGING_SEARCH_PAGE_START = "paging.search.page.start";
@@ -1406,7 +1409,7 @@ public interface FessConfig extends FessEnv, org.codelibs.fess.mylasta.direction
     /**
      * Get the value for the key 'page.docboost.max.fetch.size'. <br>
      * The value is, e.g. 1000 <br>
-     * comment: max page size
+     * comment: fetch page size
      * @return The value of found property. (NotNull: if not found, exception but basically no way)
      */
     String getPageDocboostMaxFetchSize();
@@ -1414,7 +1417,7 @@ public interface FessConfig extends FessEnv, org.codelibs.fess.mylasta.direction
     /**
      * Get the value for the key 'page.docboost.max.fetch.size' as {@link Integer}. <br>
      * The value is, e.g. 1000 <br>
-     * comment: max page size
+     * comment: fetch page size
      * @return The value of found property. (NotNull: if not found, exception but basically no way)
      * @throws NumberFormatException When the property is not integer.
      */
@@ -1495,6 +1498,21 @@ public interface FessConfig extends FessEnv, org.codelibs.fess.mylasta.direction
      */
     Integer getPageGroupMaxFetchSizeAsInteger();

+    /**
+     * Get the value for the key 'page.crawling.info.param.max.fetch.size'. <br>
+     * The value is, e.g. 100 <br>
+     * @return The value of found property. (NotNull: if not found, exception but basically no way)
+     */
+    String getPageCrawlingInfoParamMaxFetchSize();
+
+    /**
+     * Get the value for the key 'page.crawling.info.param.max.fetch.size' as {@link Integer}. <br>
+     * The value is, e.g. 100 <br>
+     * @return The value of found property. (NotNull: if not found, exception but basically no way)
+     * @throws NumberFormatException When the property is not integer.
+     */
+    Integer getPageCrawlingInfoParamMaxFetchSizeAsInteger();
+
     /**
      * Get the value for the key 'paging.search.page.start'. <br>
      * The value is, e.g. 0 <br>
@@ -2493,6 +2511,14 @@ public interface FessConfig extends FessEnv, org.codelibs.fess.mylasta.direction
             return getAsInteger(FessConfig.PAGE_GROUP_MAX_FETCH_SIZE);
         }

+        public String getPageCrawlingInfoParamMaxFetchSize() {
+            return get(FessConfig.PAGE_CRAWLING_INFO_PARAM_MAX_FETCH_SIZE);
+        }
+
+        public Integer getPageCrawlingInfoParamMaxFetchSizeAsInteger() {
+            return getAsInteger(FessConfig.PAGE_CRAWLING_INFO_PARAM_MAX_FETCH_SIZE);
+        }
+
         public String getPagingSearchPageStart() {
             return get(FessConfig.PAGING_SEARCH_PAGE_START);
         }

CrawlerPostcard.java

@@ -52,7 +52,7 @@ public class CrawlerPostcard extends LaTypicalPostcard {
     protected String[] getPropertyNames() {
         return new String[] { "hostname", "webFsCrawlStartTime", "webFsCrawlEndTime", "webFsCrawlExecTime", "webFsIndexExecTime",
                 "webFsIndexSize", "dataCrawlStartTime", "dataCrawlEndTime", "dataCrawlExecTime", "dataIndexExecTime", "dataFsIndexSize",
-                "commitStartTime", "commitEndTime", "commitExecTime", "crawlerStartTime", "crawlerEndTime", "crawlerExecTime", "status" };
+                "crawlerStartTime", "crawlerEndTime", "crawlerExecTime", "status" };
     }

     // ===================================================================================
@@ -199,33 +199,6 @@ public class CrawlerPostcard extends LaTypicalPostcard {
         registerVariable("dataFsIndexSize", dataFsIndexSize);
     }

-    /**
-     * Set the value of commitStartTime, used in parameter comment. <br>
-     * Even if empty string, treated as empty plainly. So "IF pmb != null" is false if empty.
-     * @param commitStartTime The parameter value of commitStartTime. (NotNull)
-     */
-    public void setCommitStartTime(String commitStartTime) {
-        registerVariable("commitStartTime", commitStartTime);
-    }
-
-    /**
-     * Set the value of commitEndTime, used in parameter comment. <br>
-     * Even if empty string, treated as empty plainly. So "IF pmb != null" is false if empty.
-     * @param commitEndTime The parameter value of commitEndTime. (NotNull)
-     */
-    public void setCommitEndTime(String commitEndTime) {
-        registerVariable("commitEndTime", commitEndTime);
-    }
-
-    /**
-     * Set the value of commitExecTime, used in parameter comment. <br>
-     * Even if empty string, treated as empty plainly. So "IF pmb != null" is false if empty.
-     * @param commitExecTime The parameter value of commitExecTime. (NotNull)
-     */
-    public void setCommitExecTime(String commitExecTime) {
-        registerVariable("commitExecTime", commitExecTime);
-    }
-
     /**
      * Set the value of crawlerStartTime, used in parameter comment. <br>
      * Even if empty string, treated as empty plainly. So "IF pmb != null" is false if empty.
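
Note: in this generated postcard, the names in getPropertyNames() pair with the names passed to registerVariable(...), which is why the commit removes the commit* setters and their array entries together. A trimmed, hypothetical sketch of that pairing (the base class below is a stand-in, not LaTypicalPostcard's real API):

import java.util.LinkedHashMap;
import java.util.Map;

abstract class PostcardBaseSketch {
    private final Map<String, Object> variables = new LinkedHashMap<>();

    protected abstract String[] getPropertyNames();

    protected void registerVariable(String name, Object value) {
        variables.put(name, value);
    }
}

public class MiniPostcardSketch extends PostcardBaseSketch {
    @Override
    protected String[] getPropertyNames() {
        return new String[] { "hostname", "status" };
    }

    public void setHostname(String hostname) {
        registerVariable("hostname", hostname); // must match an entry above
    }

    public void setStatus(String status) {
        registerVariable("status", status);
    }
}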

fess_config.properties

@@ -177,13 +177,14 @@ paging.page.range.size = 3
 # The option 'fillLimit' of page range for paging
 paging.page.range.fill.limit = true

-# max page size
+# fetch page size
 page.docboost.max.fetch.size=1000
 page.keymatch.max.fetch.size=1000
 page.labeltype.max.fetch.size=1000
 page.roletype.max.fetch.size=1000
 page.role.max.fetch.size=1000
 page.group.max.fetch.size=1000
+page.crawling.info.param.max.fetch.size=100
 # search page
 paging.search.page.start=0
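
Note: each page.*.max.fetch.size key in this file is exposed through a generated getter pair on FessConfig, as the interface hunks above show. A minimal usage sketch (values follow the "e.g." comments; the calling code is assumed):

final FessConfig fessConfig = ComponentUtil.getFessConfig();
final String raw = fessConfig.getPageCrawlingInfoParamMaxFetchSize();            // "100"
final Integer size = fessConfig.getPageCrawlingInfoParamMaxFetchSizeAsInteger(); // 100
// Per the generated javadoc, the Integer variant throws
// NumberFormatException if the property value is not an integer.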

crawler.dfmail (notification mail template)

@@ -6,33 +6,28 @@ subject: [FESS] Crawler completed: /*pmb.hostname*/
 >>>
 --- Server Info ---
 Host Name: /*IF pmb.hostname != null*//*pmb.hostname*//*END*//*IF pmb.hostname == null*/Unknown/*END*/
-/*IF pmb.webFsIndexSize != null*/
---- Web/FileSystem Crawler ---/*IF pmb.webFsCrawlStartTime != null*/
-Start Time: /*pmb.webFsCrawlStartTime*//*END*//*IF pmb.webFsCrawlEndTime != null*/
-End Time: /*pmb.webFsCrawlEndTime*//*END*//*IF pmb.webFsCrawlExecTime != null*/
-Exec Time: /*pmb.webFsCrawlExecTime*/ms/*END*/
---- Web/FileSystem Indexer ---/*IF pmb.webFsIndexExecTime != null*/
-Exec Time: /*pmb.webFsIndexExecTime*//*END*//*IF pmb.webFsIndexSize != null*/
-Num of Doc: /*pmb.webFsIndexSize*/ docs/*END*/
-/*END*//*IF pmb.dataFsIndexSize != null*/
---- Data Store Crawler ---/*IF pmb.dataCrawlStartTime != null*/
-Start Time: /*pmb.dataCrawlStartTime*//*END*//*IF pmb.dataCrawlEndTime != null*/
-End Time: /*pmb.dataCrawlEndTime*//*END*//*IF pmb.dataCrawlExecTime != null*/
-Exec Time: /*pmb.dataCrawlExecTime*/ms/*END*/
---- Data Store Indexer ---/*IF pmb.dataIndexExecTime != null*/
-Exec Time: /*pmb.dataIndexExecTime*//*END*//*IF pmb.dataFsIndexSize != null*/
-Num of Doc: /*pmb.dataFsIndexSize*/ docs/*END*/
-/*END*//*IF pmb.commitExecTime != null*/
---- Indexer(Commit) ---/*IF pmb.commitStartTime != null*/
-Start Time: /*pmb.commitStartTime*//*END*//*IF pmb.commitEndTime != null*/
-End Time: /*pmb.commitEndTime*//*END*//*IF pmb.commitExecTime != null*/
-Exec Time: /*pmb.commitExecTime*/ms/*END*/
-/*END*/
---- Total ---/*IF pmb.crawlerStartTime != null*/
-Start Time: /*pmb.crawlerStartTime*//*END*//*IF pmb.crawlerEndTime != null*/
-End Time: /*pmb.crawlerEndTime*//*END*//*IF pmb.crawlerExecTime != null*/
-Exec Time: /*pmb.crawlerExecTime*/ms/*END*/
+--- Web/FileSystem Crawler ---
+Start Time: /*pmb.webFsCrawlStartTime*/
+End Time: /*pmb.webFsCrawlEndTime*/
+Exec Time: /*pmb.webFsCrawlExecTime*/ms
+--- Web/FileSystem Indexer ---
+Exec Time: /*pmb.webFsIndexExecTime*/
+Num of Doc: /*pmb.webFsIndexSize*/ docs
+--- Data Store Crawler ---
+Start Time: /*pmb.dataCrawlStartTime*/
+End Time: /*pmb.dataCrawlEndTime*/
+Exec Time: /*pmb.dataCrawlExecTime*/ms
+--- Data Store Indexer ---
+Exec Time: /*pmb.dataIndexExecTime*/
+Num of Doc: /*pmb.dataFsIndexSize*/ docs
+--- Total ---
+Start Time: /*pmb.crawlerStartTime*/
+End Time: /*pmb.crawlerEndTime*/
+Exec Time: /*pmb.crawlerExecTime*/ms
 Status: /*pmb.status*/
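
Note: in this template syntax, /*pmb.x*/ embeds a postcard property and /*IF ...*/.../*END*/ guards a span. The rewrite can drop the per-line guards because the Crawler now sets every property to a non-null default ("0" or empty) before posting. A minimal Java-side sketch of the invariant that makes the flattened template safe (names illustrative, not from the commit):

import java.util.HashMap;
import java.util.Map;

public class TemplateDefaultSketch {
    public static void main(String[] args) {
        final Map<String, String> dataMap = new HashMap<>(); // e.g. crawler produced no timing data
        // A blank-safe default guarantees a non-null value for every template
        // variable, so "Exec Time: /*pmb.crawlerExecTime*/ms" renders as "0ms".
        final String execTime = dataMap.getOrDefault("crawlerExecTime", "0");
        System.out.println("Exec Time: " + execTime + "ms");
    }
}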