批量流程导入接口加入日志输出

This commit is contained in:
anhc 2022-07-07 00:22:14 +08:00
parent 798b56d6bf
commit 7e7dbfbaca
3 changed files with 328 additions and 13 deletions

View File

@ -0,0 +1,211 @@
package com.actionsoft.apps.coe.pal.datamigration.util;
import com.actionsoft.apps.coe.pal.datamigration.aris.constant.ArisConstant;
import com.actionsoft.apps.coe.pal.datamigration.constant.Constant;
import com.actionsoft.apps.coe.pal.datamigration.log.cache.LogRealTimeCountCache;
import com.actionsoft.apps.coe.pal.datamigration.log.model.LogModel;
import com.actionsoft.apps.resource.plugin.profile.DCPluginProfile;
import com.actionsoft.bpms.server.UserContext;
import com.actionsoft.bpms.server.fs.DCContext;
import com.actionsoft.bpms.server.fs.dc.DCProfileManager;
import com.actionsoft.bpms.util.UUIDGener;
import java.io.File;
import java.io.IOException;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
 * Per-run logging helper for the batch Word attribute import.
 *
 * <p>On construction it creates a dedicated DC directory for this import run and
 * four log files inside it: a brief log ({@code simpleImport.log}), a full log
 * ({@code fullImport.log}, also shown in real time by the front end), a warning
 * log and an error log. The {@code appendXxx} methods fan one message out to the
 * relevant file(s) via {@link LogUtil#appendLog}.
 */
public class WordLogUtil {
private UserContext uc;
// cache / DB key identifying this import run
private String logId;
// path of the full log, displayed in real time by the front end
private String logPath;
private File infoLogFile;
private File warnLogFile;
private File errorLogFile;
private File fullLogFile;
// optional metadata set by callers via the setters below
private String filePath;
private String fileName;
/**
 * Creates the DC log directory for this run and the four log files.
 *
 * @param _uc current user context; its UID is embedded in the directory name
 */
public WordLogUtil(UserContext _uc){
this.uc = _uc;
logId = UUIDGener.getUUID();// cache key for this run
// NOTE(review): "AttrtibuteImported" looks like a typo of "AttributeImported",
// but it is a runtime directory-name component — kept as-is for compatibility.
String fileValue = "AttrtibuteImported-" + new SimpleDateFormat("yyyy-MM-dd HH-mm-ss").format(new Date()) + "=" + _uc.getUID();
// Resolve and create the DC directory for this run
DCPluginProfile dcProfile = DCProfileManager.getDCProfile(Constant.APP_ID, Constant.UPFILE);
DCContext dc = new DCContext(_uc, dcProfile, Constant.APP_ID, "log", fileValue);
String dirPath = dc.getPath();
File file = new File(dirPath);
if (!file.exists()){
file.mkdirs();
}
// Each file is created independently: one failing creation no longer
// aborts the others (the original single try-block left later File
// fields null after the first IOException, causing an NPE below).
infoLogFile = ensureLogFile(dirPath, "simpleImport.log");
fullLogFile = ensureLogFile(dirPath, "fullImport.log");
warnLogFile = ensureLogFile(dirPath, "warningImport.log");
errorLogFile = ensureLogFile(dirPath, "errorImport.log");
logPath = fullLogFile.getPath();// log shown in real time by the front end
}
/**
 * Returns a {@link File} for {@code name} under {@code dirPath}, creating it
 * if it does not exist. Creation failures are printed and the handle is
 * still returned (matching the original best-effort behavior).
 */
private static File ensureLogFile(String dirPath, String name) {
File f = new File(dirPath, name);
try {
if (!f.exists()) {
f.createNewFile();
}
} catch (IOException e) {
e.printStackTrace();
}
return f;
}
/** Appends {@code content} to the brief (info) log only. */
public void appendInfoLog(String content) {
LogUtil.appendLog(content,infoLogFile);
}
/** Appends {@code content} to the full log only. */
public void appendFullLog(String content) {
LogUtil.appendLog(content,fullLogFile);
}
/** Appends {@code content} to the error log only. */
public void appendErrorLog(String content) {
LogUtil.appendLog(content,errorLogFile);
}
/** Appends {@code content} to the warning log only. */
public void appendWarnLog(String content) {
LogUtil.appendLog(content,warnLogFile);
}
/** Appends {@code content} to both the full log and the info log. */
public void appendAllAndInfoLog(String content) {
LogUtil.appendLog(content,fullLogFile,infoLogFile);
}
/** Appends {@code content} to both the full log and the warning log. */
public void appendAllAndWarnLog(String content) {
LogUtil.appendLog(content,fullLogFile,warnLogFile);
}
/** Appends {@code content} to both the full log and the error log. */
public void appendAllAndErrorLog(String content) {
LogUtil.appendLog(content,fullLogFile,errorLogFile);
}
/** Appends {@code content} to all four log files. */
public void appendAllLog(String content) {
LogUtil.appendLog(content,infoLogFile,fullLogFile,warnLogFile,errorLogFile);
}
/**
 * Records one batch-upload action in the log database (status "running").
 * @param id log record id
 * @param wsId asset-repository (workspace) id
 * @param fileName display name of this batch record
 * @param filePath DC path of the uploaded batch
 * @param createDate creation time (also used as the start time)
 */
public void createLogModel(String id, String wsId, String fileName, String filePath, Timestamp createDate) {
LogModel model = new LogModel();
model.setId(id);
model.setWsId(wsId);
model.setFileName(fileName);
model.setFilePath(filePath);
model.setLogPath(logPath);
model.setCreateUser(uc.getUID());
model.setCreateUserName(uc.getUserName());
model.setCreateDate(createDate);
model.setStartDate(createDate);
model.setResultStatus(Constant.LOG_RESULT_StATUS_RUN);
model.setMainInfo("导入进行中");
// Persist the log record to the DB
LogUtil.createLog(model);
}
/**
 * Returns the current result status of this batch run from the log DB.
 * @return the status value stored for {@link #logId}
 */
public int getLogResultStatus(){
return LogUtil.queryLog(logId).getResultStatus();
}
/**
 * Records a fatal import error: appends {@code logFileMsg} to the full and
 * error log files and marks the DB record as failed with {@code logDbMsg}.
 */
public void updateErrLog(String logFileMsg, String logDbMsg) {
// Write to the log files
this.appendAllAndErrorLog(logFileMsg);
// Mark the DB record as finished with error status
LogUtil.updateLog(logId, new Timestamp(System.currentTimeMillis()), Constant.LOG_RESULT_StATUS_ERROR, logDbMsg);
// Cache cleanup intentionally disabled — kept for reference.
// LogRealTimeCountCache.getCache().remove(logId);
}
public String getLogId() {
return logId;
}
public void setLogId(String logId) {
this.logId = logId;
}
public String getLogPath() {
return logPath;
}
public void setLogPath(String logPath) {
this.logPath = logPath;
}
public File getInfoLogFile() {
return infoLogFile;
}
public void setInfoLogFile(File infoLogFile) {
this.infoLogFile = infoLogFile;
}
public File getWarnLogFile() {
return warnLogFile;
}
public void setWarnLogFile(File warnLogFile) {
this.warnLogFile = warnLogFile;
}
public File getErrorLogFile() {
return errorLogFile;
}
public void setErrorLogFile(File errorLogFile) {
this.errorLogFile = errorLogFile;
}
public File getFullLogFile() {
return fullLogFile;
}
public void setFullLogFile(File fullLogFile) {
this.fullLogFile = fullLogFile;
}
public String getFilePath() {
return filePath;
}
public void setFilePath(String filePath) {
this.filePath = filePath;
}
public String getFileName() {
return fileName;
}
public void setFileName(String fileName) {
this.fileName = fileName;
}
}

View File

@ -9,9 +9,7 @@ import com.actionsoft.apps.coe.pal.datamigration.aris.web.ArisXmlImportWeb;
import com.actionsoft.apps.coe.pal.datamigration.constant.Constant;
import com.actionsoft.apps.coe.pal.datamigration.model.po.AwsOrgInfo;
import com.actionsoft.apps.coe.pal.datamigration.model.po.WordField;
import com.actionsoft.apps.coe.pal.datamigration.util.ExcelUtil;
import com.actionsoft.apps.coe.pal.datamigration.util.ShapeUtil;
import com.actionsoft.apps.coe.pal.datamigration.util.WordUtil;
import com.actionsoft.apps.coe.pal.datamigration.util.*;
import com.actionsoft.apps.coe.pal.datamigration.util.readword.CreateMaps;
import com.actionsoft.apps.coe.pal.log.CoEOpLogAPI;
import com.actionsoft.apps.coe.pal.log.CoEOpLogConst;
@ -134,15 +132,57 @@ public class DataMigrationWeb extends ActionWeb {
if (null == fileDcContext) {
return ResponseObject.newErrResponse("文件参数异常").toString();
}
//批量解析文档
File dirFile = new File(fileDcContext.getPath());
File[] fileArr = dirFile.listFiles();
//日志记录名称搭建
String fileName = "";
for (int i = 0; i < fileArr.length; i++) {
File file = fileArr[i];
fileName = StringUtils.isEmpty(fileName) ? fileName + file.getName() : ","+ fileName + file.getName();
if (i>=3){
fileName += "";
break;
}
}
Timestamp startTime = new Timestamp(System.currentTimeMillis());
WordLogUtil wordLogUtil = new WordLogUtil(_uc);
//新建数据库记录
wordLogUtil.createLogModel(wordLogUtil.getLogId(),wsId,fileName,fileDcContext.getPath(),startTime);
//初始化日志
wordLogUtil.appendInfoLog("注:该日志文件存储简要日志信息");
wordLogUtil.appendFullLog("注:该日志文件存储详细日志信息");
wordLogUtil.appendErrorLog("注:该日志文件记录错误日志信息");
wordLogUtil.appendWarnLog("注:该日志文件记录警告日志信息");
wordLogUtil.appendAllLog("\n[信息]操作人:" + _uc.getUserName() + "<" + _uc.getUID() + ">");
wordLogUtil.appendAllLog("[信息]操作时间:" + UtilDate.datetimeFormat(startTime));
// 校验资产库是否存在可用
boolean isActive = PALRepositoryQueryAPIManager.getInstance().isActiveWorkSpace(wsId);
if (!isActive) {
String msg = Constant.LOG_ERROR + "资产库不存在或已停用," + Constant.IMPORT_STOP_MSG;
wordLogUtil.updateErrLog(msg, msg);
return ResponseObject.newErrResponse("资产库不存在或已停用").toString();
}
new Thread(new Runnable() {
@Override
public void run() {
for (File file : fileArr) {
new DataMigrationWeb().analysisWord(wsId, file.getPath());
wordLogUtil.appendAllLog("\n[信息]流程属性Word文件" + file.getName());
wordLogUtil.appendAllLog("[信息]流程属性Word文件路径"+ file.getPath());
wordLogUtil.appendAllLog("-----------流程属性Word文件 Begin " + UtilDate.datetimeFormat(startTime));
ResponseObject res = new DataMigrationWeb().analysisWord(wsId, file.getPath(), wordLogUtil);
if (res.isErr()){
wordLogUtil.appendAllAndErrorLog(Constant.LOG_ERROR + "word文件--"+file.getName()+" 解析异常:"+res.getMsg()+" 中断解析\n");
}
if (res.isOk()){
wordLogUtil.appendAllAndInfoLog(Constant.LOG_END+ "word文件--"+file.getName()+" :"+res.getMsg()+"\n");
}
}
}
}).start();
@ -375,7 +415,7 @@ public class DataMigrationWeb extends ActionWeb {
return index == null ? 0 : index;
}
private ResponseObject analysisWord(String wsId,String filePath){
private ResponseObject analysisWord(String wsId,String filePath,WordLogUtil logUtil){
//解析Word文档
WordUtil wordUtil = new WordUtil();
boolean analysisFlag = wordUtil.analysisWordTable(filePath);
@ -395,6 +435,9 @@ public class DataMigrationWeb extends ActionWeb {
PALRepositoryPropertyDao propDao = new PALRepositoryPropertyDao();
DesignerShapeRelationDao relationDao = new DesignerShapeRelationDao();
List<PALRepositoryPropertyModel> importPropertyList = propDao.getPropertysByPlid(importModel.getId(), "");
if (importPropertyList.isEmpty()){
return ResponseObject.newErrResponse("流程文件属性配置异常,没有获取到文件属性");
}
for (PALRepositoryPropertyModel importProperty : importPropertyList) {
WordField<Object> wordField = fileFieldMap.get(importProperty.getPropertyName());
//旧版本的映射字段支持
@ -410,12 +453,15 @@ public class DataMigrationWeb extends ActionWeb {
if (null == wordField) {
continue;
}
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 获取Word解析数据成功["+importProperty.getPropertyName()+"]");
//使用缓存获取具体建模属性
PALRepositoryAttributeModel repositoryAttrModel = PALRepositoryAttributeCache.getAttributeByMethodIdAndAttrId(wsId, importModel.getMethodId(), importProperty.getPropertyId());
if (null == repositoryAttrModel) {
logUtil.appendAllAndErrorLog(Constant.LOG_ERROR+" 获取建模属性失败:["+importProperty.getPropertyName()+"]");
continue;
}
if ("table".equals(repositoryAttrModel.getType())) {
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 文件属性["+importProperty.getPropertyName()+"]类型为:表格");
//将解析的列表数据转化为数据存储json
JSONObject object = new JSONObject();
object.put("name", importProperty.getPropertyName());
@ -431,6 +477,8 @@ public class DataMigrationWeb extends ActionWeb {
array.add(obj);
}
}
}else {
logUtil.appendAllAndWarnLog(Constant.LOG_WARNING+" 获取["+importProperty.getPropertyName()+"] word 解析数据为 空");
}
if (array.size() > 0) {
//特殊处理表头
@ -453,18 +501,31 @@ public class DataMigrationWeb extends ActionWeb {
object.put("table", array);
//更新数据库值
importProperty.setPropertyValue(object.toJSONString());
propDao.update(importProperty);
int update = propDao.update(importProperty);
if (update>0){
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 文件属性["+importProperty.getPropertyName()+"] 保存属性值成功");
}else {
logUtil.appendAllAndWarnLog(Constant.LOG_WARNING+" 文件属性["+importProperty.getPropertyName()+"] 保存属性值失败");
}
} else if ("awsorg".equals(repositoryAttrModel.getType())) {
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 文件属性["+importProperty.getPropertyName()+"]类型为BPM组织架构");
//aws组织架构解析多组结构解析
String orgName = (String) wordField.getData();
List<AwsOrgInfo> awsOrgInfos = this.getOrgByName(orgName);
if (!awsOrgInfos.isEmpty()) {
//先更新property数据 [{"name":"部门3","id":"fdea04c8-502f-4367-82b7-a5ebe0ce5f67","type":"department"}]
importProperty.setPropertyValue(JSON.toJSONString(awsOrgInfos));
propDao.update(importProperty);
int update = propDao.update(importProperty);
if (update>0){
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 文件属性["+importProperty.getPropertyName()+"] 保存属性值成功");
}else {
logUtil.appendAllAndWarnLog(Constant.LOG_WARNING+" 文件属性["+importProperty.getPropertyName()+"] 保存属性值失败");
}
//先删除property关联关系然后新增关系
relationDao.deleteByAttrId(importModel.getId(), "", importProperty.getPropertyId());
int insert = 0;
for (AwsOrgInfo awsOrgInfo : awsOrgInfos) {
DesignerShapeRelationModel model = new DesignerShapeRelationModel();
model.setFileId(importModel.getId());
@ -474,17 +535,31 @@ public class DataMigrationWeb extends ActionWeb {
model.setRelationFileId("00000000-0000-0000-0000-000000000000");
model.setRelationShapeId("00000000-0000-0000-0000-000000000000");
model.setRelationShapeText(JSON.toJSONString(awsOrgInfo));
relationDao.insert(model);
insert += relationDao.insert(model);
}
if (insert == awsOrgInfos.size()){
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 文件属性["+importProperty.getPropertyName()+"] 保存关联关系成功");
}else {
logUtil.appendAllAndWarnLog(Constant.LOG_WARNING+" 文件属性["+importProperty.getPropertyName()+"] 保存关联关系失败");
}
}else {
logUtil.appendAllAndWarnLog(Constant.LOG_WARNING+" 获取["+importProperty.getPropertyName()+"] BPM组织架构为 空");
}
} else if ("relation".equals(repositoryAttrModel.getType())) {
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 文件属性["+importProperty.getPropertyName()+"]类型为pal模型关联");
//关联文件
List<PALRepositoryModel> fileList = new ArrayList<>();
List<String> relations = (List<String>) wordField.getData();
if (relations.isEmpty()){
logUtil.appendAllAndWarnLog(Constant.LOG_WARNING+" 获取["+importProperty.getPropertyName()+"] pal模型关联数据为 空");
}
for (String relation : relations) {
PALRepositoryModel repository = this.getRepositoryByName(wsId, this.trimFileName(relation));
if (repository != null) {
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 文件属性["+importProperty.getPropertyName()+"] 获取关联pal模型<"+this.trimFileName(relation)+"> 成功");
fileList.add(repository);
}else {
logUtil.appendAllAndWarnLog(Constant.LOG_WARNING+" 文件属性["+importProperty.getPropertyName()+"] 获取关联pal模型<"+this.trimFileName(relation)+"> 失败");
}
}
@ -499,10 +574,16 @@ public class DataMigrationWeb extends ActionWeb {
JSONObject propertyJson = JSON.parseObject(importProperty.getPropertyValue());
propertyJson.put("relationFileId", relationFileIds);
importProperty.setPropertyValue(propertyJson.toJSONString());
propDao.update(importProperty);
int update = propDao.update(importProperty);
if (update>0){
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 文件属性["+importProperty.getPropertyName()+"] 保存属性值成功");
}else {
logUtil.appendAllAndWarnLog(Constant.LOG_WARNING+" 文件属性["+importProperty.getPropertyName()+"] 保存属性值失败");
}
//先删除property关联关系然后新增关系
if (!fileList.isEmpty()) {
int insert = 0;
relationDao.deleteByAttrId(importModel.getId(), "", importProperty.getPropertyId());
for (PALRepositoryModel repositoryModel : fileList) {
DesignerShapeRelationModel relationModel = new DesignerShapeRelationModel();
@ -513,7 +594,12 @@ public class DataMigrationWeb extends ActionWeb {
relationModel.setRelationFileId(repositoryModel.getVersionId());
relationModel.setRelationShapeId("");
relationModel.setRelationShapeText(repositoryModel.getName());
relationDao.insert(relationModel);
insert += relationDao.insert(relationModel);
}
if (insert == fileList.size()){
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 文件属性["+importProperty.getPropertyName()+"] 保存关联关系成功");
}else {
logUtil.appendAllAndWarnLog(Constant.LOG_WARNING+" 文件属性["+importProperty.getPropertyName()+"] 保存关联关系失败");
}
}
@ -522,7 +608,11 @@ public class DataMigrationWeb extends ActionWeb {
} else if ("boolean".equals(repositoryAttrModel.getType())) {
} else if ("DateTimePicker".equals(repositoryAttrModel.getType())) {
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 文件属性["+importProperty.getPropertyName()+"]类型为:时间选择器");
String time = (String) wordField.getData();
if (StringUtils.isEmpty(time)){
logUtil.appendAllAndWarnLog(Constant.LOG_WARNING+" 获取["+importProperty.getPropertyName()+"] word 解析时间字符为 空");
}
try {
Date formatTime = new SimpleDateFormat("yyyy年MM月dd日").parse(time);
time = UtilDate.datetimeFormat(formatTime);
@ -531,14 +621,28 @@ public class DataMigrationWeb extends ActionWeb {
}
//更新数据库值
importProperty.setPropertyValue(time);
propDao.update(importProperty);
int update = propDao.update(importProperty);
if (update>0){
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 文件属性["+importProperty.getPropertyName()+"] 保存属性值成功");
}else {
logUtil.appendAllAndWarnLog(Constant.LOG_WARNING+" 文件属性["+importProperty.getPropertyName()+"] 保存属性值失败");
}
} else {
if (wordField.getData() instanceof String) {
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 文件属性["+importProperty.getPropertyName()+"]类型为:字符");
//直接获取Word解析字符内容
String value = (String) wordField.getData();
if (StringUtils.isEmpty(value)){
logUtil.appendAllAndWarnLog(Constant.LOG_WARNING+" 获取["+importProperty.getPropertyName()+"] word 解析字符为 空");
}
//更新数据库值
importProperty.setPropertyValue(value);
propDao.update(importProperty);
int update = propDao.update(importProperty);
if (update>0){
logUtil.appendAllAndInfoLog(Constant.LOG_DESC+" 文件属性["+importProperty.getPropertyName()+"] 保存属性值成功");
}else {
logUtil.appendAllAndWarnLog(Constant.LOG_WARNING+" 文件属性["+importProperty.getPropertyName()+"] 保存属性值失败");
}
}
}