refactor: merge develop

chuntaojun 2020-05-21 18:08:29 +08:00
commit 87a32a926b
8 changed files with 166 additions and 113 deletions

View File

@@ -18,19 +18,46 @@
 package com.alibaba.nacos.client;

+import com.alibaba.nacos.api.NacosFactory;
+import com.alibaba.nacos.api.PropertyKeyConst;
 import com.alibaba.nacos.api.config.ConfigService;
+import com.alibaba.nacos.common.utils.ThreadUtils;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import java.util.Properties;

 /**
  * @author <a href="mailto:liaochuntao@live.com">liaochuntao</a>
  */
+@Ignore
 public class ConfigTest {

-	private ConfigService config7;
-	private ConfigService config8;
-	private ConfigService config9;
+	private ConfigService configService;

-	public void test() {
+	@Before
+	public void before() throws Exception {
+		Properties properties = new Properties();
+		properties.setProperty(PropertyKeyConst.NAMESPACE, "bebf0150-e1ea-47e2-81fe-6814caf2b952");
+		properties.setProperty(PropertyKeyConst.SERVER_ADDR, "127.0.0.1:8848");
+		properties.setProperty(PropertyKeyConst.USERNAME, "chuntaojun");
+		properties.setProperty(PropertyKeyConst.PASSWORD, "1017");
+		configService = NacosFactory.createConfigService(properties);
+	}
+
+	@Test
+	public void test() throws Exception {
+		final String dataId = "lessspring";
+		final String group = "lessspring";
+		final String content = "lessspring-" + System.currentTimeMillis();
+		boolean result = configService.publishConfig(dataId, group, content);
+		Assert.assertTrue(result);
+		ThreadUtils.sleep(10_000);
+		String response = configService.getConfig(dataId, group, 5000);
+		System.out.println(response);
 	}
 }
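The test above publishes a value, sleeps, and then reads it back with getConfig. For comparison, the config client also supports push-style notification through addListener; the following is an illustrative sketch only (not part of this commit), reusing the same dataId/group as the test and assuming a ConfigService built as in before():

import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;

import java.util.concurrent.Executor;

public class ConfigListenerSketch {

	// Registers a listener that is invoked whenever the config value changes.
	public static void watch(ConfigService configService) throws Exception {
		configService.addListener("lessspring", "lessspring", new Listener() {
			@Override
			public Executor getExecutor() {
				// Returning null lets the client deliver notifications on its own thread.
				return null;
			}

			@Override
			public void receiveConfigInfo(String configInfo) {
				System.out.println("config changed: " + configInfo);
			}
		});
	}
}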

View File

@@ -164,4 +164,4 @@ public class Observable {
 	}
 }

View File

@@ -57,7 +57,8 @@ public final class ThreadUtils {
 	}

 	/**
-	 * 通过内核数算出合适的线程数1.5-2倍cpu内核数
+	 * Through the number of cores, calculate the appropriate number of threads;
+	 * 1.5-2 times the number of CPU cores
 	 *
 	 * @return thread count
 	 */
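The Javadoc describes a heuristic (roughly 1.5 to 2 times the CPU core count) but the method body is not part of this hunk. A minimal sketch of how such a heuristic is commonly written, purely for illustration and not the Nacos implementation:

// Illustrative only; names and rounding are assumptions, not the ThreadUtils source.
public final class ThreadCountSketch {

	public static int getSuitableThreadCount() {
		final int coreCount = Runtime.getRuntime().availableProcessors();
		// Aim for 1.5x the core count, never exceeding 2x, as the Javadoc describes.
		int threadCount = coreCount + coreCount / 2;
		return Math.min(threadCount, coreCount * 2);
	}

	public static void main(String[] args) {
		System.out.println("suggested thread count: " + getSuitableThreadCount());
	}
}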

View File

@@ -19,6 +19,7 @@ import com.alibaba.nacos.common.utils.IoUtils;
 import com.alibaba.nacos.common.utils.MD5Utils;
 import com.alibaba.nacos.common.utils.Observable;
 import com.alibaba.nacos.common.utils.Observer;
+import com.alibaba.nacos.common.utils.ThreadUtils;
 import com.alibaba.nacos.config.server.constant.Constants;
 import com.alibaba.nacos.config.server.manager.TaskManager;
 import com.alibaba.nacos.config.server.model.ConfigInfo;
@@ -45,10 +46,10 @@ import com.alibaba.nacos.core.distributed.raft.exception.NoSuchRaftGroupException;
 import com.alibaba.nacos.core.utils.ApplicationUtils;
 import com.alibaba.nacos.core.utils.GlobalExecutor;
 import com.alibaba.nacos.core.utils.InetUtils;
+import com.alibaba.nacos.core.utils.TimerContext;
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;

 import javax.annotation.PostConstruct;
@@ -65,6 +66,7 @@ import java.util.Random;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;

 import static com.alibaba.nacos.config.server.utils.LogUtil.fatalLog;
@@ -84,8 +86,8 @@ public class DumpService {
 	 * Here you inject the dependent objects constructively, ensuring that some
 	 * of the dependent functionality is initialized ahead of time
 	 *
 	 * @param persistService  {@link PersistService}
 	 * @param memberManager   {@link ServerMemberManager}
 	 * @param protocolManager {@link ProtocolManager}
 	 */
 	public DumpService(PersistService persistService, ServerMemberManager memberManager,
@@ -103,12 +105,8 @@ public class DumpService {
 		return memberManager;
 	}

-	public ProtocolManager getProtocolManager() {
-		return protocolManager;
-	}
-
 	@PostConstruct
-	protected void init() throws Exception {
+	protected void init() throws Throwable {
 		DynamicDataSource.getInstance().getDataSource();

 		DumpProcessor processor = new DumpProcessor(this);
@@ -132,6 +130,9 @@ public class DumpService {
 					.info("With embedded distributed storage, you need to wait for "
 							+ "the underlying master to complete before you can perform the dump operation.");

+			AtomicReference<Throwable> errorReference = new AtomicReference<>(null);
+			CountDownLatch waitDumpFinish = new CountDownLatch(1);
+
 			// watch path => /nacos_config/leader/ has value ?
 			Observer observer = new Observer() {
@@ -142,8 +143,16 @@ public class DumpService {
 					if (Objects.isNull(arg)) {
 						return;
 					}
-					dumpOperate(processor, dumpAllProcessor, dumpAllBetaProcessor,
-							dumpAllTagProcessor);
+					try {
+						dumpOperate(processor, dumpAllProcessor, dumpAllBetaProcessor,
+								dumpAllTagProcessor);
+					}
+					catch (Throwable ex) {
+						errorReference.set(ex);
+					}
+					finally {
+						waitDumpFinish.countDown();
+					}
 					protocol.protocolMetaData()
 							.unSubscribe(Constants.CONFIG_MODEL_RAFT_GROUP,
 									com.alibaba.nacos.consistency.cp.Constants.LEADER_META_DATA,
@@ -156,116 +165,134 @@ public class DumpService {
 									com.alibaba.nacos.consistency.cp.Constants.LEADER_META_DATA,
 									observer);

-			// When all dump is complete, allow the following flow
+			// We must wait for the dump task to complete the callback operation before
+			// continuing with the initialization
+			ThreadUtils.latchAwait(waitDumpFinish);
+
+			// If an exception occurs during the execution of the dump task, the exception
+			// needs to be thrown, triggering the node to start the failed process
+			final Throwable ex = errorReference.get();
+			if (Objects.nonNull(ex)) {
+				throw ex;
+			}
 		}
 		else {
-			dumpOperate(processor, dumpAllProcessor, dumpAllBetaProcessor, dumpAllTagProcessor);
+			dumpOperate(processor, dumpAllProcessor, dumpAllBetaProcessor,
+					dumpAllTagProcessor);
 		}
 	}

 	private void dumpOperate(DumpProcessor processor, DumpAllProcessor dumpAllProcessor,
 			DumpAllBetaProcessor dumpAllBetaProcessor,
 			DumpAllTagProcessor dumpAllTagProcessor) {
-		LogUtil.defaultLog.warn("DumpService start");
+		TimerContext.start("config dump job");
+		try {
+			LogUtil.defaultLog.warn("DumpService start");

 			Runnable dumpAll = () -> dumpAllTaskMgr
 					.addTask(DumpAllTask.TASK_ID, new DumpAllTask());

 			Runnable dumpAllBeta = () -> dumpAllTaskMgr
 					.addTask(DumpAllBetaTask.TASK_ID, new DumpAllBetaTask());

 			Runnable clearConfigHistory = () -> {
 				log.warn("clearConfigHistory start");
 				if (canExecute()) {
 					try {
 						Timestamp startTime = getBeforeStamp(TimeUtils.getCurrentTime(),
 								24 * getRetentionDays());
 						int totalCount = persistService
 								.findConfigHistoryCountByTime(startTime);
 						if (totalCount > 0) {
 							int pageSize = 1000;
 							int removeTime = (totalCount + pageSize - 1) / pageSize;
 							log.warn(
 									"clearConfigHistory, getBeforeStamp:{}, totalCount:{}, pageSize:{}, removeTime:{}",
-									new Object[] { startTime, totalCount, pageSize,
-											removeTime });
+									startTime, totalCount, pageSize, removeTime);
 							while (removeTime > 0) {
 								// 分页删除以免批量太大报错
 								persistService.removeConfigHistory(startTime, pageSize);
 								removeTime--;
 							}
 						}
 					}
 					catch (Throwable e) {
 						log.error("clearConfigHistory error", e);
 					}
 				}
 			};

 			try {
 				dumpConfigInfo(dumpAllProcessor);

 				// 更新beta缓存
 				LogUtil.defaultLog.info("start clear all config-info-beta.");
 				DiskUtil.clearAllBeta();
 				if (persistService.isExistTable(BETA_TABLE_NAME)) {
 					dumpAllBetaProcessor
 							.process(DumpAllBetaTask.TASK_ID, new DumpAllBetaTask());
 				}
 				// 更新Tag缓存
 				LogUtil.defaultLog.info("start clear all config-info-tag.");
 				DiskUtil.clearAllTag();
 				if (persistService.isExistTable(TAG_TABLE_NAME)) {
 					dumpAllTagProcessor
 							.process(DumpAllTagTask.TASK_ID, new DumpAllTagTask());
 				}

 				// add to dump aggr
 				List<ConfigInfoChanged> configList = persistService.findAllAggrGroup();
 				if (configList != null && !configList.isEmpty()) {
 					total = configList.size();
 					List<List<ConfigInfoChanged>> splitList = splitList(configList,
 							INIT_THREAD_COUNT);
 					for (List<ConfigInfoChanged> list : splitList) {
 						MergeAllDataWorker work = new MergeAllDataWorker(list);
 						work.start();
 					}
 					log.info("server start, schedule merge end.");
 				}
 			}
 			catch (Exception e) {
 				LogUtil.fatalLog
 						.error("Nacos Server did not start because dumpservice bean construction failure :\n"
 								+ e.getMessage(), e.getCause());
 				throw new RuntimeException(
 						"Nacos Server did not start because dumpservice bean construction failure :\n"
 								+ e.getMessage());
 			}
 			if (!ApplicationUtils.getStandaloneMode()) {
 				Runnable heartbeat = () -> {
 					String heartBeatTime = TimeUtils.getCurrentTime().toString();
 					// write disk
 					try {
 						DiskUtil.saveHeartBeatToDisk(heartBeatTime);
 					}
 					catch (IOException e) {
 						LogUtil.fatalLog.error("save heartbeat fail" + e.getMessage());
 					}
 				};
 				TimerTaskService
 						.scheduleWithFixedDelay(heartbeat, 0, 10, TimeUnit.SECONDS);

 				long initialDelay = new Random().nextInt(INITIAL_DELAY_IN_MINUTE) + 10;
 				LogUtil.defaultLog.warn("initialDelay:{}", initialDelay);

 				TimerTaskService.scheduleWithFixedDelay(dumpAll, initialDelay,
 						DUMP_ALL_INTERVAL_IN_MINUTE, TimeUnit.MINUTES);

 				TimerTaskService.scheduleWithFixedDelay(dumpAllBeta, initialDelay,
 						DUMP_ALL_INTERVAL_IN_MINUTE, TimeUnit.MINUTES);
 			}

 			TimerTaskService
 					.scheduleWithFixedDelay(clearConfigHistory, 10, 10, TimeUnit.MINUTES);
+		}
+		finally {
+			TimerContext.end(LogUtil.dumpLog);
+		}
 	}
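The core of the change to init() is an error-propagation pattern: the dump now runs inside an observer callback on another thread, so the callback records any failure in an AtomicReference, releases a CountDownLatch in a finally block, and the initializing thread awaits the latch and rethrows whatever was captured. A self-contained sketch of the same pattern, independent of the Nacos classes (the task and class names below are invented for illustration):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;

public class CallbackErrorPropagation {

	public static void main(String[] args) throws Throwable {
		AtomicReference<Throwable> errorReference = new AtomicReference<>(null);
		CountDownLatch waitFinish = new CountDownLatch(1);

		// Stand-in for the observer callback that performs the dump on another thread.
		Thread callback = new Thread(() -> {
			try {
				doWork();
			}
			catch (Throwable ex) {
				// Remember the failure so the waiting thread can rethrow it.
				errorReference.set(ex);
			}
			finally {
				// Always release the waiter, on success or failure.
				waitFinish.countDown();
			}
		});
		callback.start();

		// The initializing thread blocks until the callback has finished...
		waitFinish.await();

		// ...and fails fast if the callback recorded an exception.
		Throwable ex = errorReference.get();
		if (ex != null) {
			throw ex;
		}
		System.out.println("work finished without error");
	}

	private static void doWork() {
		// Placeholder for dumpOperate(...) in the real code.
	}
}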

View File

@@ -16,7 +16,7 @@ public class DumpServiceTest {
 	DumpService service;

 	@Test
-	public void init() throws Exception {
+	public void init() throws Throwable {
 		service.init();
 	}
 }

View File

@@ -181,4 +181,4 @@ public final class ProtocolMetaData {
 			}
 		}
 	}
 }

View File

@@ -112,7 +112,6 @@ public class StartingSpringApplicationRunListener
 	@Override
 	public void started(ConfigurableApplicationContext context) {
 		starting = false;
 		ConfigurableEnvironment env = context.getEnvironment();

 		closeExecutor();
@@ -160,10 +159,10 @@ public class StartingSpringApplicationRunListener
 		closeExecutor();

-		context.close();
-
 		LOGGER.error("Nacos failed to start, please see {} for more details.",
 				Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log"));
+
+		context.close();
 	}

 	/**

View File

@@ -16,7 +16,6 @@
 package com.alibaba.nacos.core.utils;

-import com.alibaba.nacos.common.utils.LoggerUtils;
 import com.alibaba.nacos.common.utils.Pair;

 import org.slf4j.Logger;
@@ -39,7 +38,7 @@ public class TimerContext {
 	public static void end(final Logger logger) {
 		long endTime = System.currentTimeMillis();
 		Pair<String, Long> record = TIME_RECORD.get();
-		LoggerUtils.printIfDebugEnabled(logger, "{} cost time : {} ms", record.getFirst(), (endTime - record.getSecond()));
+		logger.info("{} cost time : {} ms", record.getFirst(), (endTime - record.getSecond()));
 		TIME_RECORD.remove();
 	}
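dumpOperate() is now bracketed by TimerContext.start(...) and TimerContext.end(...), and this hunk shows the end side: a ThreadLocal holds a (name, start time) Pair that start() records and end() reads, logs, and clears. A minimal, dependency-free sketch of the same idea, for illustration only (the Nacos start() implementation is not shown in this diff):

import java.util.AbstractMap;
import java.util.Map;

public final class TimerContextSketch {

	// One (name, startTime) record per thread, mirroring the Pair used by TimerContext.
	private static final ThreadLocal<Map.Entry<String, Long>> TIME_RECORD = new ThreadLocal<>();

	public static void start(String name) {
		TIME_RECORD.set(new AbstractMap.SimpleEntry<>(name, System.currentTimeMillis()));
	}

	public static void end() {
		long endTime = System.currentTimeMillis();
		Map.Entry<String, Long> record = TIME_RECORD.get();
		System.out.println(record.getKey() + " cost time : " + (endTime - record.getValue()) + " ms");
		// Clean up so the value does not leak when the thread is reused.
		TIME_RECORD.remove();
	}

	public static void main(String[] args) throws InterruptedException {
		start("config dump job");
		Thread.sleep(100); // stand-in for the timed work
		end();
	}
}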