code format
This commit is contained in:
parent c5aaae5989
commit 9fd65e183a

10 changed files with 53 additions and 105 deletions
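
Two mechanical cleanups recur throughout the hunks below: single-statement lambda blocks are collapsed into expression lambdas, and C-style array declarations (String ex_q[]) are rewritten as Java-style declarations (String[] ex_q). The following standalone sketch illustrates both changes; the InsertOption class and the insert method are hypothetical stand-ins for the project's DBFlute behavior API, not code from this commit.

import java.util.function.Consumer;

public class LambdaFormatExample {

    // Hypothetical stand-in for a DBFlute-style insert option; not part of Fess.
    static class InsertOption {
        private boolean refresh;

        void setRefresh(final boolean refresh) {
            this.refresh = refresh;
        }

        boolean isRefresh() {
            return refresh;
        }
    }

    // Hypothetical stand-in for a behavior method such as badWordBhv.insertOrUpdate(...).
    static void insert(final String entity, final Consumer<InsertOption> opLambda) {
        final InsertOption op = new InsertOption();
        opLambda.accept(op);
        System.out.println("insert " + entity + " refresh=" + op.isRefresh());
    }

    public static void main(final String[] args) {
        // Before: a single statement wrapped in a block lambda.
        insert("badWord", op -> {
            op.setRefresh(true);
        });

        // After: the same call as an expression lambda, the form used throughout this commit.
        insert("badWord", op -> op.setRefresh(true));

        // Array declarations: C-style (before) versus Java-style (after).
        final String exQOld[] = { "a", "b" };
        final String[] exQNew = { "a", "b" };
        System.out.println(exQOld.length + " " + exQNew.length);
    }
}
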
@@ -42,13 +42,13 @@ import org.slf4j.LoggerFactory;
 public class SuggestApiManager extends BaseApiManager {
     private static final Logger logger = LoggerFactory.getLogger(SuggestApiManager.class);
 
+    @Resource
+    protected DynamicProperties systemProperties;
+
     public SuggestApiManager() {
         setPathPrefix("/suggest");
     }
 
-    @Resource
-    protected DynamicProperties systemProperties;
-
     @Override
     public boolean matches(final HttpServletRequest request) {
         final String servletPath = request.getServletPath();

@@ -62,9 +62,7 @@ public class BadWordService implements Serializable {
 
         // update pager
         BeanUtil.copyBeanToBean(badWordList, badWordPager, option -> option.include(Constants.PAGER_CONVERSION_RULE));
-        badWordPager.setPageNumberList(badWordList.pageRange(op -> {
-            op.rangeSize(5);
-        }).createPageNumberList());
+        badWordPager.setPageNumberList(badWordList.pageRange(op -> op.rangeSize(5)).createPageNumberList());
 
         return badWordList;
     }

@@ -75,17 +73,13 @@ public class BadWordService implements Serializable {
 
     public void store(final BadWord badWord) {
 
-        badWordBhv.insertOrUpdate(badWord, op -> {
-            op.setRefresh(true);
-        });
+        badWordBhv.insertOrUpdate(badWord, op -> op.setRefresh(true));
 
     }
 
     public void delete(final BadWord badWord) {
 
-        badWordBhv.delete(badWord, op -> {
-            op.setRefresh(true);
-        });
+        badWordBhv.delete(badWord, op -> op.setRefresh(true));
 
     }
 
@@ -121,9 +115,8 @@ public class BadWordService implements Serializable {
             targetWord = targetWord.substring(2);
         }
         final String target = targetWord;
-        BadWord badWord = badWordBhv.selectEntity(cb -> {
-            cb.query().setSuggestWord_Equal(target);
-        }).orElse(null);//TODO
+        BadWord badWord = badWordBhv.selectEntity(cb ->
+                cb.query().setSuggestWord_Equal(target)).orElse(null);//TODO
         final long now = ComponentUtil.getSystemHelper().getCurrentTimeAsLong();
         if (isDelete) {
             badWordBhv.delete(badWord);

@@ -158,9 +151,7 @@ public class BadWordService implements Serializable {
             list.add("BadWord");
             csvWriter.writeValues(list);
 
-            badWordBhv.selectCursor(cb -> {
-                cb.query().matchAll();
-            }, new EntityRowHandler<BadWord>() {
+            badWordBhv.selectCursor(cb -> cb.query().matchAll(), new EntityRowHandler<BadWord>() {
                 @Override
                 public void handle(final BadWord entity) {
                     final List<String> list = new ArrayList<>();

@@ -49,9 +49,8 @@ public class BoostDocumentRuleService implements Serializable {
 
         // update pager
         BeanUtil.copyBeanToBean(boostDocumentRuleList, boostDocumentRulePager, option -> option.include(Constants.PAGER_CONVERSION_RULE));
-        boostDocumentRulePager.setPageNumberList(boostDocumentRuleList.pageRange(op -> {
-            op.rangeSize(5);
-        }).createPageNumberList());
+        boostDocumentRulePager.setPageNumberList(boostDocumentRuleList.pageRange(op ->
+                op.rangeSize(5)).createPageNumberList());
 
         return boostDocumentRuleList;
     }

@@ -62,17 +61,13 @@ public class BoostDocumentRuleService implements Serializable {
 
     public void store(final BoostDocumentRule boostDocumentRule) {
 
-        boostDocumentRuleBhv.insertOrUpdate(boostDocumentRule, op -> {
-            op.setRefresh(true);
-        });
+        boostDocumentRuleBhv.insertOrUpdate(boostDocumentRule, op -> op.setRefresh(true));
 
     }
 
     public void delete(final BoostDocumentRule boostDocumentRule) {
 
-        boostDocumentRuleBhv.delete(boostDocumentRule, op -> {
-            op.setRefresh(true);
-        });
+        boostDocumentRuleBhv.delete(boostDocumentRule, op -> op.setRefresh(true));
 
     }
 
@@ -79,9 +79,7 @@ public class CrawlingInfoService implements Serializable {
 
         // update pager
         BeanUtil.copyBeanToBean(crawlingInfoList, crawlingInfoPager, option -> option.include(Constants.PAGER_CONVERSION_RULE));
-        crawlingInfoPager.setPageNumberList(crawlingInfoList.pageRange(op -> {
-            op.rangeSize(5);
-        }).createPageNumberList());
+        crawlingInfoPager.setPageNumberList(crawlingInfoList.pageRange(op -> op.rangeSize(5)).createPageNumberList());
 
         return crawlingInfoList;
     }

@@ -93,18 +91,14 @@ public class CrawlingInfoService implements Serializable {
     public void store(final CrawlingInfo crawlingInfo) {
         setupStoreCondition(crawlingInfo);
 
-        crawlingInfoBhv.insertOrUpdate(crawlingInfo, op -> {
-            op.setRefresh(true);
-        });
+        crawlingInfoBhv.insertOrUpdate(crawlingInfo, op -> op.setRefresh(true));
 
     }
 
     public void delete(final CrawlingInfo crawlingInfo) {
         setupDeleteCondition(crawlingInfo);
 
-        crawlingInfoBhv.delete(crawlingInfo, op -> {
-            op.setRefresh(true);
-        });
+        crawlingInfoBhv.delete(crawlingInfo, op -> op.setRefresh(true));
 
     }
 
@@ -130,9 +124,7 @@ public class CrawlingInfoService implements Serializable {
     }
 
     protected void setupDeleteCondition(final CrawlingInfo crawlingInfo) {
-        crawlingInfoParamBhv.queryDelete(cb -> {
-            cb.query().setCrawlingInfoId_Equal(crawlingInfo.getId());
-        });
+        crawlingInfoParamBhv.queryDelete(cb -> cb.query().setCrawlingInfoId_Equal(crawlingInfo.getId()));
     }
 
     public void deleteSessionIdsBefore(final String activeSessionId, final String name, final long date) {

@@ -156,21 +148,13 @@ public class CrawlingInfoService implements Serializable {
             for (final CrawlingInfo cs : crawlingInfoList) {
                 crawlingInfoIdList.add(cs.getId());
             }
-
-            crawlingInfoParamBhv.queryDelete(cb2 -> {
-                cb2.query().setCrawlingInfoId_InScope(crawlingInfoIdList);
-            });
-
-            crawlingInfoBhv.batchDelete(crawlingInfoList, op -> {
-                op.setRefresh(true);
-            });
+            crawlingInfoParamBhv.queryDelete(cb2 -> cb2.query().setCrawlingInfoId_InScope(crawlingInfoIdList));
+            crawlingInfoBhv.batchDelete(crawlingInfoList, op -> op.setRefresh(true));
         }
     }
 
     public CrawlingInfo get(final String sessionId) {
-        return crawlingInfoBhv.selectEntity(cb -> {
-            cb.query().setSessionId_Equal(sessionId);
-        }).orElse(null);//TODO
+        return crawlingInfoBhv.selectEntity(cb -> cb.query().setSessionId_Equal(sessionId)).orElse(null);//TODO
     }
 
     public void storeInfo(final List<CrawlingInfoParam> crawlingInfoParamList) {

@@ -184,9 +168,7 @@ public class CrawlingInfoService implements Serializable {
                 crawlingInfoParam.setCreatedTime(now);
             }
         }
-        crawlingInfoParamBhv.batchInsert(crawlingInfoParamList, op -> {
-            op.setRefresh(true);
-        });
+        crawlingInfoParamBhv.batchInsert(crawlingInfoParamList, op -> op.setRefresh(true));
     }
 
     public List<CrawlingInfoParam> getCrawlingInfoParamList(final String id) {

@@ -218,26 +200,18 @@ public class CrawlingInfoService implements Serializable {
             cb.specify().columnId();
         });
         final List<String> idList = activeSessionList.stream().map(session -> session.getId()).collect(Collectors.toList());
-        crawlingInfoParamBhv.queryDelete(cb1 -> {
-            cb1.query().filtered((cq, cf) -> {
-                cq.matchAll();
-                if (!idList.isEmpty()) {
-                    cf.not(subCf -> {
-                        subCf.setCrawlingInfoId_InScope(idList);
-                    });
-                }
-            });
-        });
-        crawlingInfoBhv.queryDelete(cb2 -> {
-            cb2.query().filtered((cq, cf) -> {
-                cq.matchAll();
-                if (!idList.isEmpty()) {
-                    cf.not(subCf -> {
-                        subCf.setId_InScope(idList);
-                    });
-                }
-            });
-        });
+        crawlingInfoParamBhv.queryDelete(cb1 -> cb1.query().filtered((cq, cf) -> {
+            cq.matchAll();
+            if (!idList.isEmpty()) {
+                cf.not(subCf -> subCf.setCrawlingInfoId_InScope(idList));
+            }
+        }));
+        crawlingInfoBhv.queryDelete(cb2 -> cb2.query().filtered((cq, cf) -> {
+            cq.matchAll();
+            if (!idList.isEmpty()) {
+                cf.not(subCf -> subCf.setId_InScope(idList));
+            }
+        }));
     }
 
     public void importCsv(final Reader reader) {

@@ -258,9 +232,7 @@ public class CrawlingInfoService implements Serializable {
                     crawlingInfo = new CrawlingInfo();
                     crawlingInfo.setSessionId(list.get(0));
                     crawlingInfo.setCreatedTime(formatter.parse(list.get(1)).getTime());
-                    crawlingInfoBhv.insert(crawlingInfo, op -> {
-                        op.setRefresh(true);
-                    });
+                    crawlingInfoBhv.insert(crawlingInfo, op -> op.setRefresh(true));
                 }
 
                 final CrawlingInfoParam entity = new CrawlingInfoParam();

@@ -268,9 +240,7 @@ public class CrawlingInfoService implements Serializable {
                 entity.setKey(list.get(2));
                 entity.setValue(list.get(3));
                 entity.setCreatedTime(formatter.parse(list.get(4)).getTime());
-                crawlingInfoParamBhv.insert(entity, op -> {
-                    op.setRefresh(true);
-                });
+                crawlingInfoParamBhv.insert(entity, op -> op.setRefresh(true));
             } catch (final Exception e) {
                 logger.warn("Failed to read a click log: " + list, e);
             }

@@ -295,9 +265,7 @@ public class CrawlingInfoService implements Serializable {
             list.add("CreatedTime");
             csvWriter.writeValues(list);
             final DateTimeFormatter formatter = DateTimeFormatter.ofPattern(CoreLibConstants.DATE_FORMAT_ISO_8601_EXTEND);
-            crawlingInfoParamBhv.selectCursor(cb -> {
-                cb.query().matchAll();
-            }, new EntityRowHandler<CrawlingInfoParam>() {
+            crawlingInfoParamBhv.selectCursor(cb -> cb.query().matchAll(),new EntityRowHandler<CrawlingInfoParam>() {
                 @Override
                 public void handle(final CrawlingInfoParam entity) {
                     final List<String> list = new ArrayList<>();

@@ -337,16 +305,10 @@ public class CrawlingInfoService implements Serializable {
     }
 
     public void deleteBefore(final long date) {
-        crawlingInfoBhv.selectBulk(cb -> {
-            cb.query().setExpiredTime_LessThan(date);
-        }, list -> {
+        crawlingInfoBhv.selectBulk(cb -> cb.query().setExpiredTime_LessThan(date), list -> {
             final List<String> idList = list.stream().map(entity -> entity.getId()).collect(Collectors.toList());
-            crawlingInfoParamBhv.queryDelete(cb1 -> {
-                cb1.query().setCrawlingInfoId_InScope(idList);
-            });
-            crawlingInfoBhv.queryDelete(cb2 -> {
-                cb2.query().setExpiredTime_LessThan(date);
-            });
+            crawlingInfoParamBhv.queryDelete(cb1 -> cb1.query().setCrawlingInfoId_InScope(idList));
+            crawlingInfoBhv.queryDelete(cb2 -> cb2.query().setExpiredTime_LessThan(date));
         });
     }
 
@@ -40,10 +40,6 @@ public class EditForm implements Serializable {
     @ValidateTypeFailure
     public Integer crudMode;
 
-    public String getCurrentPageNumber() {
-        return pageNumber;
-    }
-
     @Required
     @Size(max = 1000)
     public String id;

@@ -68,6 +64,10 @@ public class EditForm implements Serializable {
     @Size(max = 1000)
     public String configId;
 
+    public String getCurrentPageNumber() {
+        return pageNumber;
+    }
+
     public void initialize() {
         id = null;
         url = null;

@@ -57,15 +57,15 @@ public class ListForm implements SearchRequestParams, Serializable {
 
     public String[] lang;
 
+    public Map<String, String[]> fields = new HashMap<>();
+
+    public String[] ex_q;
+
     @Override
     public String getQuery() {
         return q;
     }
 
-    public Map<String, String[]> fields = new HashMap<>();
-
-    public String ex_q[];
-
     @Override
     public String[] getExtraQueries() {
         return stream(ex_q).get(stream -> stream.filter(StringUtil::isNotBlank).distinct().toArray(n -> new String[n]));

@@ -50,7 +50,7 @@ public class SearchForm implements SearchRequestParams, Serializable {
 
     public String[] lang;
 
-    public String ex_q[];
+    public String[] ex_q;
 
     @ValidateTypeFailure
     public Integer start;

@@ -127,8 +127,8 @@ public class SynonymFile extends DictionaryFile<SynonymItem> {
                 continue; // ignore empty lines and comments
             }
 
-            String inputs[];
-            String outputs[];
+            String[] inputs;
+            String[] outputs;
 
             final List<String> sides = split(line, "=>");
             if (sides.size() > 1) { // explicit mapping

@@ -90,7 +90,7 @@ public class IntervalControlHelper {
 
         protected long delay;
 
-        protected int days[];
+        protected int[] days;
 
         protected boolean reverse;
 
@@ -359,11 +359,11 @@ public class FessMultipartRequestHandler implements MultipartRequestHandler {
 
     protected String getBaseFileName(final String filePath) {
         final String fileName = new File(filePath).getName();
-        int colonIndex = fileName.indexOf(":");
+        int colonIndex = fileName.indexOf(':');
         if (colonIndex == -1) {
            colonIndex = fileName.indexOf("\\\\"); // Windows SMB
         }
-        final int backslashIndex = fileName.lastIndexOf("\\");
+        final int backslashIndex = fileName.lastIndexOf('\\');
         if (colonIndex > -1 && backslashIndex > -1) {
             return fileName.substring(backslashIndex + 1);
         } else {
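
The last hunk switches single-character lookups from the String overloads of indexOf and lastIndexOf to the char overloads. Both overloads return the same index for a one-character needle, so the change is purely stylistic. A small standalone check of that behavior follows; the file name is made up for illustration, not taken from the commit.

public class IndexOfOverloadExample {
    public static void main(final String[] args) {
        final String fileName = "C:\\share\\report.pdf"; // hypothetical SMB-style name

        // String overload versus char overload: identical result for a single character.
        final int viaString = fileName.indexOf(":");
        final int viaChar = fileName.indexOf(':');
        System.out.println(viaString + " " + viaChar); // prints: 1 1

        // Same pattern as getBaseFileName: strip everything up to the last backslash.
        final int backslashIndex = fileName.lastIndexOf('\\');
        System.out.println(fileName.substring(backslashIndex + 1)); // prints: report.pdf
    }
}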