formatting with {} instead of string concatenation
This commit is contained in:
parent
e344c3cbcc
commit
338be397ed
44 changed files with 120 additions and 121 deletions
|
@ -63,7 +63,7 @@ public class EsApiManager extends BaseApiManager {
|
|||
@PostConstruct
|
||||
public void register() {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Load " + this.getClass().getSimpleName());
|
||||
logger.info("Load {}", this.getClass().getSimpleName());
|
||||
}
|
||||
ComponentUtil.getWebApiManagerFactory().add(this);
|
||||
}
|
||||
|
@ -179,7 +179,7 @@ public class EsApiManager extends BaseApiManager {
|
|||
} catch (final ClientAbortException e) {
|
||||
logger.debug("Client aborts this request.", e);
|
||||
} catch (final IOException e) {
|
||||
logger.error("Failed to read " + path + " from " + filePath);
|
||||
logger.error("Failed to read {} from {}", path, filePath);
|
||||
throw new WebApiException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
|
||||
}
|
||||
} else {
|
||||
|
@ -189,7 +189,7 @@ public class EsApiManager extends BaseApiManager {
|
|||
} catch (final ClientAbortException e) {
|
||||
logger.debug("Client aborts this request.", e);
|
||||
} catch (final IOException e) {
|
||||
logger.error("Failed to read " + path + " from " + filePath);
|
||||
logger.error("Failed to read {} from {}", path, filePath);
|
||||
throw new WebApiException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -71,7 +71,7 @@ public class GsaApiManager extends BaseApiManager implements WebApiManager {
|
|||
@PostConstruct
|
||||
public void register() {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Load " + this.getClass().getSimpleName());
|
||||
logger.info("Load {}", this.getClass().getSimpleName());
|
||||
}
|
||||
ComponentUtil.getWebApiManagerFactory().add(this);
|
||||
}
|
||||
|
|
|
@ -72,7 +72,7 @@ public class JsonApiManager extends BaseJsonApiManager {
|
|||
@PostConstruct
|
||||
public void register() {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Load " + this.getClass().getSimpleName());
|
||||
logger.info("Load {}", this.getClass().getSimpleName());
|
||||
}
|
||||
ComponentUtil.getWebApiManagerFactory().add(this);
|
||||
}
|
||||
|
|
|
@ -59,7 +59,7 @@ public class SuggestApiManager extends BaseJsonApiManager {
|
|||
@PostConstruct
|
||||
public void register() {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Load " + this.getClass().getSimpleName());
|
||||
logger.info("Load {}", this.getClass().getSimpleName());
|
||||
}
|
||||
ComponentUtil.getWebApiManagerFactory().add(this);
|
||||
}
|
||||
|
|
|
@ -58,13 +58,13 @@ public class ScriptExecutorJob implements LaJob {
|
|||
final String id = scheduledJob.getId();
|
||||
final String target = scheduledJob.getTarget();
|
||||
if (!ComponentUtil.getFessConfig().isSchedulerTarget(target)) {
|
||||
logger.info("Ignore Job " + id + ":" + scheduledJob.getName() + " because of not target: " + scheduledJob.getTarget());
|
||||
logger.info("Ignore Job {}:{} because of not target: {}", id, scheduledJob.getName(), scheduledJob.getTarget());
|
||||
return;
|
||||
}
|
||||
|
||||
final JobHelper jobHelper = ComponentUtil.getJobHelper();
|
||||
if (!jobHelper.isAvailable(id)) {
|
||||
logger.info("Job " + id + " is unavailable. Unregistering this job.");
|
||||
logger.info("Job {} is unavailable. Unregistering this job.", id);
|
||||
jobHelper.unregister(scheduledJob);
|
||||
return;
|
||||
}
|
||||
|
@ -95,17 +95,17 @@ public class ScriptExecutorJob implements LaJob {
|
|||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Starting Job {}. scriptType: {}, script: {}", id, scriptType, script);
|
||||
} else if (scheduledJob.isLoggingEnabled() && logger.isInfoEnabled()) {
|
||||
logger.info("Starting Job " + id + ".");
|
||||
logger.info("Starting Job {}.", id);
|
||||
}
|
||||
|
||||
final Object ret = jobExecutor.execute(script);
|
||||
if (ret == null) {
|
||||
if (scheduledJob.isLoggingEnabled() && logger.isInfoEnabled()) {
|
||||
logger.info("Finished Job " + id + ".");
|
||||
logger.info("Finished Job {}.", id);
|
||||
}
|
||||
} else {
|
||||
if (scheduledJob.isLoggingEnabled() && logger.isInfoEnabled()) {
|
||||
logger.info("Finished Job " + id + ". The return value is:\n" + ret);
|
||||
logger.info("Finished Job {}. The return value is:\n{}", id, ret);
|
||||
}
|
||||
jobLog.setScriptResult(ret.toString());
|
||||
}
|
||||
|
|
|
@ -166,7 +166,7 @@ public class AdminDesignAction extends FessAdminAction {
|
|||
|
||||
final File parentFile = uploadFile.getParentFile();
|
||||
if (!parentFile.exists() && !parentFile.mkdirs()) {
|
||||
logger.warn("Could not create " + parentFile.getAbsolutePath());
|
||||
logger.warn("Could not create {}", parentFile.getAbsolutePath());
|
||||
}
|
||||
|
||||
try {
|
||||
|
|
|
@ -101,7 +101,7 @@ public class AdminEsreqAction extends FessAdminAction {
|
|||
CopyUtil.copy(in, tempFile);
|
||||
} catch (final Exception e1) {
|
||||
if (tempFile != null && tempFile.exists() && !tempFile.delete()) {
|
||||
logger.warn("Failed to delete " + tempFile.getAbsolutePath());
|
||||
logger.warn("Failed to delete {}", tempFile.getAbsolutePath());
|
||||
}
|
||||
throw e1;
|
||||
}
|
||||
|
@ -110,7 +110,7 @@ public class AdminEsreqAction extends FessAdminAction {
|
|||
out.write(in);
|
||||
} finally {
|
||||
if (tempFile.exists() && !tempFile.delete()) {
|
||||
logger.warn("Failed to delete " + tempFile.getAbsolutePath());
|
||||
logger.warn("Failed to delete {}", tempFile.getAbsolutePath());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
|
|
@ -124,13 +124,13 @@ public class AdminMaintenanceAction extends FessAdminAction {
|
|||
.execute(
|
||||
ActionListener.wrap(
|
||||
res -> {
|
||||
logger.info("Close " + docIndex);
|
||||
logger.info("Close {}", docIndex);
|
||||
fessEsClient
|
||||
.admin()
|
||||
.indices()
|
||||
.prepareOpen(docIndex)
|
||||
.execute(
|
||||
ActionListener.wrap(res2 -> logger.info("Open " + docIndex),
|
||||
ActionListener.wrap(res2 -> logger.info("Open {}", docIndex),
|
||||
e -> logger.warn("Failed to open " + docIndex, e)));
|
||||
}, e -> logger.warn("Failed to close " + docIndex, e)));
|
||||
saveInfo(messages -> messages.addSuccessStartedDataUpdate(GLOBAL));
|
||||
|
@ -311,7 +311,7 @@ public class AdminMaintenanceAction extends FessAdminAction {
|
|||
fessEsClient.addMapping(docIndex, "doc", toIndex);
|
||||
fessEsClient.reindex(fromIndex, toIndex, replaceAliases);
|
||||
if (replaceAliases && !fessEsClient.updateAlias(toIndex)) {
|
||||
logger.warn("Failed to update aliases for " + fromIndex + " and " + toIndex);
|
||||
logger.warn("Failed to update aliases for {} and {}", fromIndex, toIndex);
|
||||
}
|
||||
}, e -> logger.warn("Failed to reindex from " + fromIndex + " to " + toIndex, e)));
|
||||
return true;
|
||||
|
@ -320,4 +320,4 @@ public class AdminMaintenanceAction extends FessAdminAction {
|
|||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -94,7 +94,7 @@ public class CommandChain implements AuthenticationChain {
|
|||
}
|
||||
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Command: " + String.join(" ", commands));
|
||||
logger.info("Command: {}", String.join(" ", commands));
|
||||
}
|
||||
|
||||
final String[] cmds = stream(commands).get(stream -> stream.map(s -> {
|
||||
|
@ -134,7 +134,7 @@ public class CommandChain implements AuthenticationChain {
|
|||
final int exitValue = currentProcess.exitValue();
|
||||
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Exit Code: " + exitValue + " - Process Output:\n" + it.getOutput());
|
||||
logger.info("Exit Code: {} - Process Output:\n{}", exitValue, it.getOutput());
|
||||
}
|
||||
if (exitValue == 143 && mt.isTeminated()) {
|
||||
throw new CommandExecutionException("The command execution is timeout: " + String.join(" ", commands));
|
||||
|
|
|
@ -105,7 +105,7 @@ public abstract class AbstractFessFileTransformer extends AbstractTransformer im
|
|||
return null;
|
||||
}
|
||||
if (getLogger().isDebugEnabled()) {
|
||||
getLogger().debug("ExtractData: " + extractData);
|
||||
getLogger().debug("ExtractData: {}", extractData);
|
||||
}
|
||||
// meta
|
||||
extractData
|
||||
|
@ -154,7 +154,7 @@ public abstract class AbstractFessFileTransformer extends AbstractTransformer im
|
|||
if (dt != null) {
|
||||
dataMap.put(mapping.getValue1(), FessFunctions.formatDate(dt));
|
||||
} else {
|
||||
logger.warn("Failed to parse " + mapping.toString());
|
||||
logger.warn("Failed to parse {}", mapping.toString());
|
||||
}
|
||||
} else {
|
||||
logger.warn("Unknown mapping type: {}={}", key, mapping);
|
||||
|
|
|
@ -235,14 +235,14 @@ public class FessXpathTransformer extends XpathTransformer implements FessTransf
|
|||
}
|
||||
}
|
||||
if (noindex && nofollow) {
|
||||
logger.info("META(robots=noindex,nofollow): " + responseData.getUrl());
|
||||
logger.info("META(robots=noindex,nofollow): {}", responseData.getUrl());
|
||||
throw new ChildUrlsException(Collections.emptySet(), "#processMetaRobots");
|
||||
} else if (noindex) {
|
||||
logger.info("META(robots=noindex): " + responseData.getUrl());
|
||||
logger.info("META(robots=noindex): {}", responseData.getUrl());
|
||||
storeChildUrls(responseData, resultData);
|
||||
throw new ChildUrlsException(resultData.getChildUrlSet(), "#processMetaRobots");
|
||||
} else if (nofollow) {
|
||||
logger.info("META(robots=nofollow): " + responseData.getUrl());
|
||||
logger.info("META(robots=nofollow): {}", responseData.getUrl());
|
||||
responseData.setNoFollow(true);
|
||||
}
|
||||
}
|
||||
|
@ -281,14 +281,14 @@ public class FessXpathTransformer extends XpathTransformer implements FessTransf
|
|||
}
|
||||
}
|
||||
if (noindex && nofollow) {
|
||||
logger.info("HEADER(robots=noindex,nofollow): " + responseData.getUrl());
|
||||
logger.info("HEADER(robots=noindex,nofollow): {}", responseData.getUrl());
|
||||
throw new ChildUrlsException(Collections.emptySet(), "#processXRobotsTag");
|
||||
} else if (noindex) {
|
||||
logger.info("HEADER(robots=noindex): " + responseData.getUrl());
|
||||
logger.info("HEADER(robots=noindex): {}", responseData.getUrl());
|
||||
storeChildUrls(responseData, resultData);
|
||||
throw new ChildUrlsException(resultData.getChildUrlSet(), "#processXRobotsTag");
|
||||
} else if (nofollow) {
|
||||
logger.info("HEADER(robots=nofollow): " + responseData.getUrl());
|
||||
logger.info("HEADER(robots=nofollow): {}", responseData.getUrl());
|
||||
responseData.setNoFollow(true);
|
||||
}
|
||||
});
|
||||
|
@ -345,7 +345,7 @@ public class FessXpathTransformer extends XpathTransformer implements FessTransf
|
|||
&& isValidCanonicalUrl(responseData.getUrl(), canonicalUrl)) {
|
||||
final Set<RequestData> childUrlSet = new HashSet<>();
|
||||
childUrlSet.add(RequestDataBuilder.newRequestData().get().url(canonicalUrl).build());
|
||||
logger.info("CANONICAL: " + responseData.getUrl() + " -> " + canonicalUrl);
|
||||
logger.info("CANONICAL: {} -> {}", responseData.getUrl(), canonicalUrl);
|
||||
throw new ChildUrlsException(childUrlSet, this.getClass().getName() + "#putAdditionalData");
|
||||
}
|
||||
|
||||
|
@ -593,7 +593,7 @@ public class FessXpathTransformer extends XpathTransformer implements FessTransf
|
|||
parseTextContent(node, buf);
|
||||
}
|
||||
} catch (final Exception e) {
|
||||
logger.warn("Could not parse a value of " + xpath);
|
||||
logger.warn("Could not parse a value of {}", xpath);
|
||||
}
|
||||
if (buf == null) {
|
||||
return null;
|
||||
|
@ -781,7 +781,7 @@ public class FessXpathTransformer extends XpathTransformer implements FessTransf
|
|||
}
|
||||
|
||||
if (u == null) {
|
||||
logger.warn("Ignored child URL: " + attrValue + " in " + url);
|
||||
logger.warn("Ignored child URL: {} in {}", attrValue, url);
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
|
@ -35,7 +35,7 @@ public class KuromojiCreator extends DictionaryCreator {
|
|||
@PostConstruct
|
||||
public void register() {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Load " + this.getClass().getSimpleName());
|
||||
logger.info("Load {}", this.getClass().getSimpleName());
|
||||
}
|
||||
dictionaryManager.addCreator(this);
|
||||
}
|
||||
|
|
|
@ -35,7 +35,7 @@ public class CharMappingCreator extends DictionaryCreator {
|
|||
@PostConstruct
|
||||
public void register() {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Load " + this.getClass().getSimpleName());
|
||||
logger.info("Load {}", this.getClass().getSimpleName());
|
||||
}
|
||||
dictionaryManager.addCreator(this);
|
||||
}
|
||||
|
|
|
@ -152,7 +152,7 @@ public class CharMappingFile extends DictionaryFile<CharMappingItem> {
|
|||
final Matcher m = parsePattern.matcher(replacedLine);
|
||||
|
||||
if (!m.find()) {
|
||||
logger.warn("Failed to parse " + line + " in " + path);
|
||||
logger.warn("Failed to parse {} in {}", line, path);
|
||||
if (updater != null) {
|
||||
updater.write("# " + line);
|
||||
}
|
||||
|
@ -163,7 +163,7 @@ public class CharMappingFile extends DictionaryFile<CharMappingItem> {
|
|||
output = m.group(2).trim();
|
||||
|
||||
if (inputs == null || output == null || inputs.length == 0) {
|
||||
logger.warn("Failed to parse " + line + " in " + path);
|
||||
logger.warn("Failed to parse {} in {}", line, path);
|
||||
if (updater != null) {
|
||||
updater.write("# " + line);
|
||||
}
|
||||
|
|
|
@ -35,7 +35,7 @@ public class ProtwordsCreator extends DictionaryCreator {
|
|||
@PostConstruct
|
||||
public void register() {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Load " + this.getClass().getSimpleName());
|
||||
logger.info("Load {}", this.getClass().getSimpleName());
|
||||
}
|
||||
dictionaryManager.addCreator(this);
|
||||
}
|
||||
|
|
|
@ -35,7 +35,7 @@ public class StemmerOverrideCreator extends DictionaryCreator {
|
|||
@PostConstruct
|
||||
public void register() {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Load " + this.getClass().getSimpleName());
|
||||
logger.info("Load {}", this.getClass().getSimpleName());
|
||||
}
|
||||
dictionaryManager.addCreator(this);
|
||||
}
|
||||
|
|
|
@ -149,7 +149,7 @@ public class StemmerOverrideFile extends DictionaryFile<StemmerOverrideItem> {
|
|||
final Matcher m = parsePattern.matcher(replacedLine);
|
||||
|
||||
if (!m.find()) {
|
||||
logger.warn("Failed to parse " + line + " in " + path);
|
||||
logger.warn("Failed to parse {} in {}", line, path);
|
||||
if (updater != null) {
|
||||
updater.write("# " + line);
|
||||
}
|
||||
|
@ -160,7 +160,7 @@ public class StemmerOverrideFile extends DictionaryFile<StemmerOverrideItem> {
|
|||
final String output = m.group(2).trim();
|
||||
|
||||
if (input == null || output == null) {
|
||||
logger.warn("Failed to parse " + line + " in " + path);
|
||||
logger.warn("Failed to parse {} in {}", line, path);
|
||||
if (updater != null) {
|
||||
updater.write("# " + line);
|
||||
}
|
||||
|
|
|
@ -35,7 +35,7 @@ public class StopwordsCreator extends DictionaryCreator {
|
|||
@PostConstruct
|
||||
public void register() {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Load " + this.getClass().getSimpleName());
|
||||
logger.info("Load {}", this.getClass().getSimpleName());
|
||||
}
|
||||
dictionaryManager.addCreator(this);
|
||||
}
|
||||
|
|
|
@ -35,7 +35,7 @@ public class SynonymCreator extends DictionaryCreator {
|
|||
@PostConstruct
|
||||
public void register() {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Load " + this.getClass().getSimpleName());
|
||||
logger.info("Load {}", this.getClass().getSimpleName());
|
||||
}
|
||||
dictionaryManager.addCreator(this);
|
||||
}
|
||||
|
|
|
@ -131,7 +131,7 @@ public abstract class AbstractDataStore implements DataStore {
|
|||
try {
|
||||
readInterval = Long.parseLong(value);
|
||||
} catch (final NumberFormatException e) {
|
||||
logger.warn("Invalid read interval: " + value);
|
||||
logger.warn("Invalid read interval: {}", value);
|
||||
}
|
||||
}
|
||||
return readInterval;
|
||||
|
|
|
@ -94,7 +94,7 @@ public class FileListIndexUpdateCallbackImpl implements IndexUpdateCallback {
|
|||
// deleted file
|
||||
deleteDocument(paramMap, dataMap);
|
||||
} else {
|
||||
logger.warn("unknown event: " + eventType + ", data: " + dataMap);
|
||||
logger.warn("unknown event: {}, data: {}", eventType, dataMap);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -108,14 +108,14 @@ public class FileListIndexUpdateCallbackImpl implements IndexUpdateCallback {
|
|||
synchronized (indexUpdateCallback) {
|
||||
// required check
|
||||
if (!dataMap.containsKey(fessConfig.getIndexFieldUrl()) || dataMap.get(fessConfig.getIndexFieldUrl()) == null) {
|
||||
logger.warn("Could not add a doc. Invalid data: " + dataMap);
|
||||
logger.warn("Could not add a doc. Invalid data: {}", dataMap);
|
||||
return;
|
||||
}
|
||||
|
||||
final String url = dataMap.get(fessConfig.getIndexFieldUrl()).toString();
|
||||
final CrawlerClient client = crawlerClientFactory.getClient(url);
|
||||
if (client == null) {
|
||||
logger.warn("CrawlerClient is null. Data: " + dataMap);
|
||||
logger.warn("CrawlerClient is null. Data: {}", dataMap);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -147,7 +147,7 @@ public class FileListIndexUpdateCallbackImpl implements IndexUpdateCallback {
|
|||
final RuleManager ruleManager = SingletonLaContainer.getComponent(RuleManager.class);
|
||||
final Rule rule = ruleManager.getRule(responseData);
|
||||
if (rule == null) {
|
||||
logger.warn("No url rule. Data: " + dataMap);
|
||||
logger.warn("No url rule. Data: {}", dataMap);
|
||||
} else {
|
||||
responseData.setRuleId(rule.getRuleId());
|
||||
final ResponseProcessor responseProcessor = rule.getResponseProcessor();
|
||||
|
@ -199,7 +199,7 @@ public class FileListIndexUpdateCallbackImpl implements IndexUpdateCallback {
|
|||
|
||||
// required check
|
||||
if (!dataMap.containsKey(fessConfig.getIndexFieldUrl()) || dataMap.get(fessConfig.getIndexFieldUrl()) == null) {
|
||||
logger.warn("Could not delete a doc. Invalid data: " + dataMap);
|
||||
logger.warn("Could not delete a doc. Invalid data: {}", dataMap);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
|
|
@ -224,7 +224,7 @@ public class DataConfig extends BsDataConfig implements CrawlingConfig {
|
|||
final String password = paramMap.get(CRAWLER_FILE_AUTH + "." + fileAuthName + ".password");
|
||||
|
||||
if (StringUtil.isEmpty(username)) {
|
||||
logger.warn("username is empty. fileAuth:" + fileAuthName);
|
||||
logger.warn("username is empty. fileAuth:{}", fileAuthName);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -263,7 +263,7 @@ public class DataConfig extends BsDataConfig implements CrawlingConfig {
|
|||
final String password = paramMap.get(CRAWLER_FILE_AUTH + "." + fileAuthName + ".password");
|
||||
|
||||
if (StringUtil.isEmpty(username)) {
|
||||
logger.warn("username is empty. fileAuth:" + fileAuthName);
|
||||
logger.warn("username is empty. fileAuth:{}", fileAuthName);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
|
|
@ -244,7 +244,7 @@ public class Crawler {
|
|||
if (Constants.CRAWLER_PROCESS_COMMAND_THREAD_DUMP.equals(command)) {
|
||||
ThreadDumpUtil.printThreadDump();
|
||||
} else {
|
||||
logger.warn("Unknown process command: " + command);
|
||||
logger.warn("Unknown process command: {}", command);
|
||||
}
|
||||
if (Thread.interrupted()) {
|
||||
return;
|
||||
|
@ -364,7 +364,7 @@ public class Crawler {
|
|||
}
|
||||
buf.append(entry.getKey()).append('=').append(entry.getValue());
|
||||
}
|
||||
logger.info("[CRAWL INFO] " + buf.toString());
|
||||
logger.info("[CRAWL INFO] {}", buf.toString());
|
||||
|
||||
// notification
|
||||
try {
|
||||
|
@ -530,7 +530,7 @@ public class Crawler {
|
|||
try {
|
||||
crawlerThread.join();
|
||||
} catch (final Exception e) {
|
||||
logger.info("Interrupted a crawling process: " + crawlerThread.getName());
|
||||
logger.info("Interrupted a crawling process: {}", crawlerThread.getName());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -141,7 +141,7 @@ public class ThumbnailGenerator {
|
|||
|
||||
final int totalCount = process(options);
|
||||
if (totalCount != 0) {
|
||||
logger.info("Created " + totalCount + " thumbnail files.");
|
||||
logger.info("Created {} thumbnail files.", totalCount);
|
||||
} else {
|
||||
logger.info("No new thumbnails found.");
|
||||
}
|
||||
|
|
|
@ -107,7 +107,7 @@ public class CrawlingInfoHelper {
|
|||
public void updateParams(final String sessionId, final String name, final int dayForCleanup) {
|
||||
final CrawlingInfo crawlingInfo = getCrawlingInfoService().getLast(sessionId);
|
||||
if (crawlingInfo == null) {
|
||||
logger.warn("No crawling session: " + sessionId);
|
||||
logger.warn("No crawling session: {}", sessionId);
|
||||
return;
|
||||
}
|
||||
if (StringUtil.isNotBlank(name)) {
|
||||
|
|
|
@ -161,7 +161,7 @@ public class DataIndexHelper {
|
|||
final long execTime = System.currentTimeMillis() - startTime;
|
||||
crawlingInfoHelper.putToInfoMap(Constants.DATA_CRAWLING_EXEC_TIME, Long.toString(execTime));
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("[EXEC TIME] crawling time: " + execTime + "ms");
|
||||
logger.info("[EXEC TIME] crawling time: {}ms", execTime);
|
||||
}
|
||||
|
||||
crawlingInfoHelper.putToInfoMap(Constants.DATA_INDEX_EXEC_TIME, Long.toString(indexUpdateCallback.getExecuteTime()));
|
||||
|
@ -210,7 +210,7 @@ public class DataIndexHelper {
|
|||
final DataStoreFactory dataStoreFactory = ComponentUtil.getDataStoreFactory();
|
||||
dataStore = dataStoreFactory.getDataStore(dataConfig.getHandlerName());
|
||||
if (dataStore == null) {
|
||||
logger.error("DataStore(" + dataConfig.getHandlerName() + ") is not found.");
|
||||
logger.error("DataStore({}) is not found.", dataConfig.getHandlerName());
|
||||
} else {
|
||||
try {
|
||||
dataStore.store(dataConfig, indexUpdateCallback, initParamMap);
|
||||
|
@ -231,7 +231,7 @@ public class DataIndexHelper {
|
|||
}
|
||||
final String sessionId = initParamMap.get(Constants.SESSION_ID);
|
||||
if (StringUtil.isBlank(sessionId)) {
|
||||
logger.warn("Invalid sessionId at " + dataConfig);
|
||||
logger.warn("Invalid sessionId at {}", dataConfig);
|
||||
return;
|
||||
}
|
||||
final FessConfig fessConfig = ComponentUtil.getFessConfig();
|
||||
|
|
|
@ -78,12 +78,12 @@ public class IndexingHelper {
|
|||
}
|
||||
if (logger.isInfoEnabled()) {
|
||||
if (docList.getContentSize() > 0) {
|
||||
logger.info("Sent " + docList.size() + " docs (Doc:{process " + docList.getProcessingTime() + "ms, send "
|
||||
+ (System.currentTimeMillis() - execTime) + "ms, size "
|
||||
+ MemoryUtil.byteCountToDisplaySize(docList.getContentSize()) + "}, " + MemoryUtil.getMemoryUsageLog() + ")");
|
||||
logger.info("Sent {} docs (Doc:{process {}ms, send {}ms, size {}}, {})", docList.size(), docList.getProcessingTime(),
|
||||
(System.currentTimeMillis() - execTime),
|
||||
MemoryUtil.byteCountToDisplaySize(docList.getContentSize()), MemoryUtil.getMemoryUsageLog());
|
||||
} else {
|
||||
logger.info("Sent " + docList.size() + " docs (Doc:{send " + (System.currentTimeMillis() - execTime) + "ms}, "
|
||||
+ MemoryUtil.getMemoryUsageLog() + ")");
|
||||
logger.info("Sent {} docs (Doc:{send {}ms}, {})", docList.size(), (System.currentTimeMillis() - execTime),
|
||||
MemoryUtil.getMemoryUsageLog());
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
|
|
|
@ -59,7 +59,7 @@ public class JobHelper {
|
|||
|
||||
final String id = scheduledJob.getId();
|
||||
if (!Constants.T.equals(scheduledJob.getAvailable())) {
|
||||
logger.info("Inactive Job " + id + ":" + scheduledJob.getName());
|
||||
logger.info("Inactive Job {}:{}", id, scheduledJob.getName());
|
||||
try {
|
||||
unregister(scheduledJob);
|
||||
} catch (final Exception e) {
|
||||
|
@ -76,35 +76,35 @@ public class JobHelper {
|
|||
final Map<String, Object> params = new HashMap<>();
|
||||
ComponentUtil.getComponent(ScheduledJobBhv.class).selectByPK(scheduledJob.getId())
|
||||
.ifPresent(e -> params.put(Constants.SCHEDULED_JOB, e)).orElse(() -> {
|
||||
logger.warn("Job " + scheduledJob.getId() + " is not found.");
|
||||
logger.warn("Job {} is not found.", scheduledJob.getId());
|
||||
});
|
||||
return params;
|
||||
};
|
||||
findJobByUniqueOf(LaJobUnique.of(id)).ifPresent(job -> {
|
||||
if (!job.isUnscheduled()) {
|
||||
if (StringUtil.isNotBlank(scheduledJob.getCronExpression())) {
|
||||
logger.info("Starting Job " + id + ":" + scheduledJob.getName());
|
||||
logger.info("Starting Job {}:{}", id, scheduledJob.getName());
|
||||
final String cronExpression = scheduledJob.getCronExpression();
|
||||
job.reschedule(cronExpression, op -> op.changeNoticeLogToDebug().params(paramsOp));
|
||||
} else {
|
||||
logger.info("Inactive Job " + id + ":" + scheduledJob.getName());
|
||||
logger.info("Inactive Job {}:{}", id, scheduledJob.getName());
|
||||
job.becomeNonCron();
|
||||
}
|
||||
} else if (StringUtil.isNotBlank(scheduledJob.getCronExpression())) {
|
||||
logger.info("Starting Job " + id + ":" + scheduledJob.getName());
|
||||
logger.info("Starting Job {}:{}", id, scheduledJob.getName());
|
||||
final String cronExpression = scheduledJob.getCronExpression();
|
||||
job.reschedule(cronExpression, op -> op.changeNoticeLogToDebug().params(paramsOp));
|
||||
}
|
||||
}).orElse(
|
||||
() -> {
|
||||
if (StringUtil.isNotBlank(scheduledJob.getCronExpression())) {
|
||||
logger.info("Starting Job " + id + ":" + scheduledJob.getName());
|
||||
logger.info("Starting Job {}:{}", id, scheduledJob.getName());
|
||||
final String cronExpression = scheduledJob.getCronExpression();
|
||||
cron.register(cronExpression, fessConfig.getSchedulerJobClassAsClass(),
|
||||
fessConfig.getSchedulerConcurrentExecModeAsEnum(),
|
||||
op -> op.uniqueBy(id).changeNoticeLogToDebug().params(paramsOp));
|
||||
} else {
|
||||
logger.info("Inactive Job " + id + ":" + scheduledJob.getName());
|
||||
logger.info("Inactive Job {}:{}", id, scheduledJob.getName());
|
||||
cron.registerNonCron(fessConfig.getSchedulerJobClassAsClass(), fessConfig.getSchedulerConcurrentExecModeAsEnum(),
|
||||
op -> op.uniqueBy(id).changeNoticeLogToDebug().params(paramsOp));
|
||||
}
|
||||
|
|
|
@ -56,7 +56,7 @@ public class OpenSearchHelper {
|
|||
osddFile = new File(path);
|
||||
if (!osddFile.isFile()) {
|
||||
osddFile = null;
|
||||
logger.warn(path + " was not found.");
|
||||
logger.warn("{} was not found.", path);
|
||||
}
|
||||
} else {
|
||||
logger.info("OSDD file is not found.");
|
||||
|
|
|
@ -47,11 +47,11 @@ public class ProcessHelper {
|
|||
public void destroy() {
|
||||
for (final String sessionId : runningProcessMap.keySet()) {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Stopping process " + sessionId);
|
||||
logger.info("Stopping process {}", sessionId);
|
||||
}
|
||||
if (destroyProcess(sessionId) == 0) {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Stopped process " + sessionId);
|
||||
logger.info("Stopped process {}", sessionId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -77,7 +77,7 @@ public class RelatedContentHelper {
|
|||
if (entity.getTerm().startsWith(regexPrefix)) {
|
||||
final String regex = entity.getTerm().substring(regexPrefix.length());
|
||||
if (StringUtil.isBlank(regex)) {
|
||||
logger.warn("Unknown regex pattern: " + entity.getTerm());
|
||||
logger.warn("Unknown regex pattern: {}", entity.getTerm());
|
||||
} else {
|
||||
pair.getSecond().add(new Pair<>(Pattern.compile(regex), entity.getContent()));
|
||||
}
|
||||
|
|
|
@ -328,7 +328,7 @@ public class SearchLogHelper {
|
|||
}
|
||||
clickCountMap.put(docId, countObj);
|
||||
}).orElse(() -> {
|
||||
logger.warn("Not Found for SearchLog: " + clickLog);
|
||||
logger.warn("Not Found for SearchLog: {}", clickLog);
|
||||
});
|
||||
} catch (final Exception e) {
|
||||
logger.warn("Failed to process: " + clickLog, e);
|
||||
|
|
|
@ -144,7 +144,7 @@ public class WebFsIndexHelper {
|
|||
try {
|
||||
urlFilterService.delete(sid);
|
||||
} catch (final Exception e) {
|
||||
logger.warn("Failed to delete url filters for " + sid);
|
||||
logger.warn("Failed to delete url filters for {}", sid);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -156,7 +156,7 @@ public class WebFsIndexHelper {
|
|||
final String u = duplicateHostHelper.convert(urlValue);
|
||||
crawler.addUrl(u);
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Target URL: " + u);
|
||||
logger.info("Target URL: {}", u);
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
@ -166,7 +166,7 @@ public class WebFsIndexHelper {
|
|||
if (!urlValue.startsWith("#")) {
|
||||
crawler.addIncludeFilter(urlValue);
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Included URL: " + urlValue);
|
||||
logger.info("Included URL: {}", urlValue);
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
@ -176,7 +176,7 @@ public class WebFsIndexHelper {
|
|||
if (!urlValue.startsWith("#")) {
|
||||
crawler.addExcludeFilter(urlValue);
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Excluded URL: " + urlValue);
|
||||
logger.info("Excluded URL: {}", urlValue);
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
@ -188,7 +188,7 @@ public class WebFsIndexHelper {
|
|||
final String urlValue = Pattern.quote(u);
|
||||
crawler.addExcludeFilter(urlValue);
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Excluded URL from failures: " + urlValue);
|
||||
logger.info("Excluded URL from failures: {}", urlValue);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -250,7 +250,7 @@ public class WebFsIndexHelper {
|
|||
try {
|
||||
urlFilterService.delete(sid);
|
||||
} catch (final Exception e) {
|
||||
logger.warn("Failed to delete url filters for " + sid);
|
||||
logger.warn("Failed to delete url filters for {}", sid);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -269,7 +269,7 @@ public class WebFsIndexHelper {
|
|||
}
|
||||
crawler.addUrl(u);
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Target Path: " + u);
|
||||
logger.info("Target Path: {}", u);
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
@ -287,7 +287,7 @@ public class WebFsIndexHelper {
|
|||
}
|
||||
crawler.addIncludeFilter(urlValue);
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Included Path: " + urlValue);
|
||||
logger.info("Included Path: {}", urlValue);
|
||||
}
|
||||
} else if (line.startsWith("#DISABLE_URL_ENCODE")) {
|
||||
urlEncodeDisabled.set(true);
|
||||
|
@ -307,7 +307,7 @@ public class WebFsIndexHelper {
|
|||
}
|
||||
crawler.addExcludeFilter(urlValue);
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Excluded Path: " + urlValue);
|
||||
logger.info("Excluded Path: {}", urlValue);
|
||||
}
|
||||
} else if (line.startsWith("#DISABLE_URL_ENCODE")) {
|
||||
urlEncodeDisabled.set(true);
|
||||
|
@ -321,7 +321,7 @@ public class WebFsIndexHelper {
|
|||
final String urlValue = Pattern.quote(u);
|
||||
crawler.addExcludeFilter(urlValue);
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Excluded Path from failures: " + urlValue);
|
||||
logger.info("Excluded Path from failures: {}", urlValue);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -409,7 +409,7 @@ public class WebFsIndexHelper {
|
|||
final long execTime = System.currentTimeMillis() - startTime;
|
||||
crawlingInfoHelper.putToInfoMap(Constants.WEB_FS_CRAWLING_EXEC_TIME, Long.toString(execTime));
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("[EXEC TIME] crawling time: " + execTime + "ms");
|
||||
logger.info("[EXEC TIME] crawling time: {}ms", execTime);
|
||||
}
|
||||
|
||||
indexUpdater.setFinishCrawling(true);
|
||||
|
|
|
@ -259,7 +259,7 @@ public class IndexUpdater extends Thread {
|
|||
|
||||
if (emptyListCount >= maxEmptyListCount) {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Terminating indexUpdater. " + "emptyListCount is over " + maxEmptyListCount + ".");
|
||||
logger.info("Terminating indexUpdater. emptyListCount is over {}.", maxEmptyListCount);
|
||||
}
|
||||
// terminate crawling
|
||||
finishCrawling = true;
|
||||
|
@ -303,7 +303,7 @@ public class IndexUpdater extends Thread {
|
|||
}
|
||||
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("[EXEC TIME] index update time: " + executeTime + "ms");
|
||||
logger.info("[EXEC TIME] index update time: {}ms", executeTime);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -334,14 +334,14 @@ public class IndexUpdater extends Thread {
|
|||
final Transformer transformer = ComponentUtil.getComponent(accessResultData.getTransformerName());
|
||||
if (transformer == null) {
|
||||
// no transformer
|
||||
logger.warn("No transformer: " + accessResultData.getTransformerName());
|
||||
logger.warn("No transformer: {}", accessResultData.getTransformerName());
|
||||
continue;
|
||||
}
|
||||
@SuppressWarnings("unchecked")
|
||||
final Map<String, Object> map = (Map<String, Object>) transformer.getData(accessResultData);
|
||||
if (map.isEmpty()) {
|
||||
// no transformer
|
||||
logger.warn("No data: " + accessResult.getUrl());
|
||||
logger.warn("No data: {}", accessResult.getUrl());
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -501,8 +501,7 @@ public class IndexUpdater extends Thread {
|
|||
final IntervalControlHelper intervalControlHelper = ComponentUtil.getIntervalControlHelper();
|
||||
if (totalHits > unprocessedDocumentSize && intervalControlHelper.isCrawlerRunning()) {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Stopped all crawler threads. " + " You have " + totalHits + " (>" + unprocessedDocumentSize + ") "
|
||||
+ " unprocessed docs.");
|
||||
logger.info("Stopped all crawler threads. You have {} (>{}) unprocessed docs.", totalHits, unprocessedDocumentSize);
|
||||
}
|
||||
intervalControlHelper.setCrawlerRunning(false);
|
||||
}
|
||||
|
@ -525,7 +524,7 @@ public class IndexUpdater extends Thread {
|
|||
finishedSessionIdList.clear();
|
||||
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Deleted completed document data. " + "The execution time is " + (System.currentTimeMillis() - execTime) + "ms.");
|
||||
logger.info("Deleted completed document data. The execution time is {}ms.", (System.currentTimeMillis() - execTime));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -104,7 +104,7 @@ public abstract class ScoreBooster {
|
|||
if (bulkRequestBuilder != null) {
|
||||
final BulkResponse response = bulkRequestBuilder.execute().actionGet(requestTimeout);
|
||||
if (response.hasFailures()) {
|
||||
logger.warn("Failed to update scores: " + response.buildFailureMessage());
|
||||
logger.warn("Failed to update scores: {}", response.buildFailureMessage());
|
||||
}
|
||||
bulkRequestBuilder = null;
|
||||
}
|
||||
|
|
|
@ -58,7 +58,7 @@ public class SsoManager {
|
|||
|
||||
public void register(final SsoAuthenticator authenticator) {
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Load " + authenticator.getClass().getSimpleName());
|
||||
logger.info("Load {}", authenticator.getClass().getSimpleName());
|
||||
}
|
||||
authenticatorList.add(authenticator);
|
||||
}
|
||||
|
|
|
@ -104,7 +104,7 @@ public class ThumbnailManager {
|
|||
}
|
||||
}
|
||||
if (baseDir.mkdirs()) {
|
||||
logger.info("Created: " + baseDir.getAbsolutePath());
|
||||
logger.info("Created: {}", baseDir.getAbsolutePath());
|
||||
}
|
||||
if (!baseDir.isDirectory()) {
|
||||
throw new FessSystemException("Not found: " + baseDir.getAbsolutePath());
|
||||
|
@ -246,7 +246,7 @@ public class ThumbnailManager {
|
|||
final File noImageFile = new File(outputFile.getAbsolutePath() + NOIMAGE_FILE_SUFFIX);
|
||||
if (!noImageFile.isFile() || System.currentTimeMillis() - noImageFile.lastModified() > noImageExpired) {
|
||||
if (noImageFile.isFile() && !noImageFile.delete()) {
|
||||
logger.warn("Failed to delete " + noImageFile.getAbsolutePath());
|
||||
logger.warn("Failed to delete {}", noImageFile.getAbsolutePath());
|
||||
}
|
||||
final ThumbnailGenerator generator = ComponentUtil.getComponent(generatorName);
|
||||
if (generator.isAvailable()) {
|
||||
|
@ -259,7 +259,7 @@ public class ThumbnailManager {
|
|||
}
|
||||
}
|
||||
} else {
|
||||
logger.warn(generatorName + " is not available.");
|
||||
logger.warn("{} is not available.", generatorName);
|
||||
}
|
||||
} else if (logger.isDebugEnabled()) {
|
||||
logger.debug("No image file exists: {}", noImageFile.getAbsolutePath());
|
||||
|
@ -279,7 +279,7 @@ public class ThumbnailManager {
|
|||
logger.debug("Add thumbnail task: {}", task);
|
||||
}
|
||||
if (!thumbnailTaskQueue.offer(task)) {
|
||||
logger.warn("Failed to add thumbnail task: " + task);
|
||||
logger.warn("Failed to add thumbnail task: {}", task);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
@ -507,7 +507,7 @@ public class ThumbnailManager {
|
|||
// ignore
|
||||
}
|
||||
Files.move(path, newPath);
|
||||
logger.info("Move " + path + " to " + newPath);
|
||||
logger.info("Move {} to {}", path, newPath);
|
||||
} catch (final IOException e) {
|
||||
logger.warn("Failed to move " + path, e);
|
||||
}
|
||||
|
|
|
@ -201,7 +201,7 @@ public abstract class BaseThumbnailGenerator implements ThumbnailGenerator {
|
|||
}
|
||||
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("Generating Thumbnail: " + url);
|
||||
logger.info("Generating Thumbnail: {}", url);
|
||||
}
|
||||
|
||||
final CrawlerClientFactory crawlerClientFactory =
|
||||
|
|
|
@ -82,7 +82,7 @@ public class CommandGenerator extends BaseThumbnailGenerator {
|
|||
parentFile.mkdirs();
|
||||
}
|
||||
if (!parentFile.isDirectory()) {
|
||||
logger.warn("Not found: " + parentFile.getAbsolutePath());
|
||||
logger.warn("Not found: {}", parentFile.getAbsolutePath());
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -101,9 +101,9 @@ public class CommandGenerator extends BaseThumbnailGenerator {
|
|||
executeCommand(thumbnailId, cmdList);
|
||||
|
||||
if (outputFile.isFile() && outputFile.length() == 0) {
|
||||
logger.warn("Thumbnail File is empty. ID is " + thumbnailId);
|
||||
logger.warn("Thumbnail File is empty. ID is {}", thumbnailId);
|
||||
if (outputFile.delete()) {
|
||||
logger.info("Deleted: " + outputFile.getAbsolutePath());
|
||||
logger.info("Deleted: {}", outputFile.getAbsolutePath());
|
||||
}
|
||||
updateThumbnailField(thumbnailId, StringUtil.EMPTY);
|
||||
return false;
|
||||
|
@ -188,7 +188,7 @@ public class CommandGenerator extends BaseThumbnailGenerator {
|
|||
|
||||
@Override
|
||||
public void run() {
|
||||
logger.warn("CommandGenerator is timed out: " + commandList);
|
||||
logger.warn("CommandGenerator is timed out: {}", commandList);
|
||||
try {
|
||||
p.destroyForcibly().waitFor(timeout, TimeUnit.MILLISECONDS);
|
||||
} catch (final Exception e) {
|
||||
|
|
|
@ -74,7 +74,7 @@ public class HtmlTagBasedGenerator extends BaseThumbnailGenerator {
|
|||
parentFile.mkdirs();
|
||||
}
|
||||
if (!parentFile.isDirectory()) {
|
||||
logger.warn("Not found: " + parentFile.getAbsolutePath());
|
||||
logger.warn("Not found: {}", parentFile.getAbsolutePath());
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -95,10 +95,10 @@ public class HtmlTagBasedGenerator extends BaseThumbnailGenerator {
|
|||
created = true;
|
||||
break;
|
||||
case FAILED:
|
||||
logger.warn("Failed to create thumbnail: " + thumbnailId + " -> " + responseData.getUrl());
|
||||
logger.warn("Failed to create thumbnail: {} -> {}", thumbnailId, responseData.getUrl());
|
||||
break;
|
||||
case INVALID_SIZE:
|
||||
logger.info("Unmatched thumbnail size: " + thumbnailId + " -> " + responseData.getUrl());
|
||||
logger.info("Unmatched thumbnail size: {} -> {}", thumbnailId, responseData.getUrl());
|
||||
break;
|
||||
case NO_IMAGE:
|
||||
if (logger.isDebugEnabled()) {
|
||||
|
@ -106,7 +106,7 @@ public class HtmlTagBasedGenerator extends BaseThumbnailGenerator {
|
|||
}
|
||||
break;
|
||||
default:
|
||||
logger.error("Unknown thumbnail result: " + thumbnailId + " -> " + responseData.getUrl());
|
||||
logger.error("Unknown thumbnail result: {} -> {}", thumbnailId, responseData.getUrl());
|
||||
break;
|
||||
}
|
||||
} catch (final Throwable t) {
|
||||
|
@ -120,7 +120,7 @@ public class HtmlTagBasedGenerator extends BaseThumbnailGenerator {
|
|||
if (!created) {
|
||||
updateThumbnailField(thumbnailId, StringUtil.EMPTY);
|
||||
if (outputFile.exists() && !outputFile.delete()) {
|
||||
logger.warn("Failed to delete " + outputFile.getAbsolutePath());
|
||||
logger.warn("Failed to delete {}", outputFile.getAbsolutePath());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -50,10 +50,10 @@ public final class UpgradeUtil {
|
|||
final String source = FileUtil.readUTF8(filePath);
|
||||
try (CurlResponse response = ComponentUtil.getCurlHelper().post("/_configsync/file").param("path", path).body(source).execute()) {
|
||||
if (response.getHttpStatusCode() == 200) {
|
||||
logger.info("Register " + path + " to " + indexName);
|
||||
logger.info("Register {} to {}", path, indexName);
|
||||
return true;
|
||||
} else {
|
||||
logger.warn("Invalid request for " + path);
|
||||
logger.warn("Invalid request for {}", path);
|
||||
}
|
||||
}
|
||||
} catch (final Exception e) {
|
||||
|
@ -74,7 +74,7 @@ public final class UpgradeUtil {
|
|||
indicesClient.prepareAliases().addAlias(indexName, aliasName, source).execute()
|
||||
.actionGet(fessConfig.getIndexIndicesTimeout());
|
||||
if (response.isAcknowledged()) {
|
||||
logger.info("Created " + aliasName + " alias for " + indexName);
|
||||
logger.info("Created {} alias for {}", aliasName, indexName);
|
||||
return true;
|
||||
} else if (logger.isDebugEnabled()) {
|
||||
logger.debug("Failed to create {} alias for {}", aliasName, indexName);
|
||||
|
@ -107,10 +107,10 @@ public final class UpgradeUtil {
|
|||
indicesClient.preparePutMapping(index).setSource(source, XContentType.JSON).execute()
|
||||
.actionGet(fessConfig.getIndexIndicesTimeout());
|
||||
if (putMappingResponse.isAcknowledged()) {
|
||||
logger.info("Created " + index + "/" + type + " mapping.");
|
||||
logger.info("Created {}/{} mapping.", index, type);
|
||||
return true;
|
||||
} else {
|
||||
logger.warn("Failed to create " + index + "/" + type + " mapping.");
|
||||
logger.warn("Failed to create {}/{} mapping.", index, type);
|
||||
}
|
||||
// TODO bulk
|
||||
} catch (final Exception e) {
|
||||
|
@ -130,7 +130,7 @@ public final class UpgradeUtil {
|
|||
final AcknowledgedResponse pmResponse =
|
||||
indicesClient.preparePutMapping(index).setSource(source, XContentType.JSON).execute().actionGet();
|
||||
if (!pmResponse.isAcknowledged()) {
|
||||
logger.warn("Failed to add " + field + " to " + index + "/" + type);
|
||||
logger.warn("Failed to add {} to {}/{}", field, index, type);
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
|
@ -150,7 +150,7 @@ public final class UpgradeUtil {
|
|||
final PutMappingRequestBuilder builder = indicesClient.preparePutMapping(index).setSource(source, XContentType.JSON);
|
||||
final AcknowledgedResponse pmResponse = builder.execute().actionGet();
|
||||
if (!pmResponse.isAcknowledged()) {
|
||||
logger.warn("Failed to update " + index + " settings.");
|
||||
logger.warn("Failed to update {} settings.", index);
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
|
@ -191,7 +191,7 @@ public final class UpgradeUtil {
|
|||
|
||||
@Override
|
||||
public void onResponse(final AcknowledgedResponse response) {
|
||||
logger.info("Deleted " + index + " index.");
|
||||
logger.info("Deleted {} index.", index);
|
||||
comsumer.accept(response);
|
||||
}
|
||||
|
||||
|
|
|
@ -49,7 +49,7 @@ public class CrawlTestBase extends ITBase {
|
|||
final String schedulerId = getSchedulerIds(namePrefix).get(0);
|
||||
final Response response = checkMethodBase(requestBody).post("/api/admin/scheduler/" + schedulerId + "/start");
|
||||
if (response.getBody().jsonPath().getInt("response.status") == 0) {
|
||||
logger.info("Start scheduler \"" + schedulerId + "\"");
|
||||
logger.info("Start scheduler \"{}\"", schedulerId);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
|
|
|
@ -107,7 +107,7 @@ public abstract class CrudTestBase extends ITBase {
|
|||
checkPutMethod(requestBody, getItemEndpointSuffix()).then().body("response.created", equalTo(true))
|
||||
.body("response.status", equalTo(0));
|
||||
|
||||
//logger.info("create " + i + checkPutMethod(requestBody, getItemEndpointSuffix()).asString()); // for debugging
|
||||
//logger.info("create {}{}", i, checkPutMethod(requestBody, getItemEndpointSuffix()).asString()); // for debugging
|
||||
refresh();
|
||||
}
|
||||
|
||||
|
|
|
@ -40,7 +40,7 @@ public class ITBase {
|
|||
public static String settingTestToken() {
|
||||
final String testToken = System.getProperty(TEST_TOKEN);
|
||||
if (testToken != null) {
|
||||
logger.info("Token: " + testToken);
|
||||
logger.info("Token: {}", testToken);
|
||||
return testToken;
|
||||
}
|
||||
|
||||
|
@ -50,7 +50,7 @@ public class ITBase {
|
|||
+ "\"}}\n{\"updatedTime\":1490250145200,\"updatedBy\":\"admin\",\"createdBy\":\"admin\",\"permissions\":[\"Radmin-api\",\"Rguest\"],\"name\":\"Admin API\",\"createdTime\":1490250145200,\"token\":\""
|
||||
+ DEFAULT_TEST_TOKEN + "\"}\n").when().post(getEsUrl() + "/_bulk");
|
||||
given().contentType("application/json").when().post(getEsUrl() + "/_refresh");
|
||||
logger.info("Created Token: " + DEFAULT_TEST_TOKEN);
|
||||
logger.info("Created Token: {}", DEFAULT_TEST_TOKEN);
|
||||
return DEFAULT_TEST_TOKEN;
|
||||
}
|
||||
|
||||
|
|
Loading…
Add table
Reference in a new issue