Merge remote-tracking branch 'origin/pet' into pet
commit d41cea6aeb
@@ -108,6 +108,7 @@ public class TextLibraryController extends BaseController {
        }
        if (StringUtils.isEmpty(pd.getString("CORPINFO_ID")))pd.put("CORPINFO_ID",Jurisdiction.getCORPINFO_ID());


        if ("3".equals(pd.getString("ASSOCIATION"))) {
            pd.put("TYPE_ONE", "43ed4012090d4614bb35da60d06c8264");
            pd.put("plan",pd.getString("CATEGORY_ID"));
@@ -124,6 +125,11 @@ public class TextLibraryController extends BaseController {
        }

        pd.put("STATUS","1");
        // Special handling for the standards library
        if ("~".equals(pd.getString("CORPINFO_ID"))) {
            pd.put("CORPINFO_ID", null);
            pd.put("STATUS", null);
        }

        page.setPd(pd);
        List<PageData> varList = textlibraryService.list(page);
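As a quick illustration of the "~" branch above, here is a minimal, self-contained sketch that uses a plain Map in place of the project's PageData (the map-style access mirrors how the controller uses pd; applyStandardsLibraryRule is a hypothetical name, not a method in the controller):

    import java.util.HashMap;
    import java.util.Map;

    public class CorpFilterSketch {

        // When the caller passes CORPINFO_ID = "~", the standards library should be
        // queried without the company and status filters, so both keys are nulled out.
        static void applyStandardsLibraryRule(Map<String, Object> pd) {
            pd.put("STATUS", "1");
            if ("~".equals(pd.get("CORPINFO_ID"))) {
                pd.put("CORPINFO_ID", null);
                pd.put("STATUS", null);
            }
        }

        public static void main(String[] args) {
            Map<String, Object> pd = new HashMap<>();
            pd.put("CORPINFO_ID", "~");
            applyStandardsLibraryRule(pd);
            System.out.println(pd); // both CORPINFO_ID and STATUS are now null
        }
    }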
@@ -54,6 +54,10 @@ public class TextLibraryServiceImpl implements TextLibraryService {
    @Value("${heBeiQinAnFile}")
    private String heBeiQinAnFile;

    @Value("${biaoZhunShuJuKu}")
    private String biaoZhunShuJuKu;


    /**
     * Insert
     *
@@ -107,8 +111,12 @@ public class TextLibraryServiceImpl implements TextLibraryService {
        }
        // If this is migrated data, use the Hebei Qin'an path
        for (PageData entity : list) {
            if(!"1".equals(entity.getString("MIGRATION_FLAG"))){
                entity.put("PATH",heBeiQinAnFile + entity.getString("PATH"));
            String url = entity.getString("PATH");
            if (!"1".equals(entity.getString("MIGRATION_FLAG"))) {
                entity.put("PATH", heBeiQinAnFile + url);
            }
            if ("2".equals(entity.getString("MIGRATION_FLAG"))) {
                entity.put("PATH", biaoZhunShuJuKu + url);
            }
        }
        return list;
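A hedged sketch of what the loop above effectively does to each PATH, with the two prefixes passed in as plain strings instead of the @Value fields. The flag semantics ('1' already a full URL, '2' standards database, anything else legacy Hebei Qin'an storage) are read off the diff; resolvePath itself is hypothetical:

    public class FilePathSketch {

        // Mirrors the effective result of the two checks in the service loop:
        // "1"  -> path is already complete, leave it as-is
        // "2"  -> prefix with the standards-database host (biaoZhunShuJuKu)
        // else -> prefix with the legacy Hebei Qin'an host (heBeiQinAnFile)
        static String resolvePath(String migrationFlag, String path,
                                  String heBeiQinAnFile, String biaoZhunShuJuKu) {
            if ("1".equals(migrationFlag)) {
                return path;
            }
            if ("2".equals(migrationFlag)) {
                return biaoZhunShuJuKu + path;
            }
            return heBeiQinAnFile + path;
        }

        public static void main(String[] args) {
            String qinAn = "https://file.zcloudchina.com/YTHFile";
            String biaoZhun = "https://file.zcloudchina.com/";
            System.out.println(resolvePath(null, "/doc/a.pdf", qinAn, biaoZhun));
            System.out.println(resolvePath("2", "/doc/a.pdf", qinAn, biaoZhun));
        }
    }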
@@ -992,37 +992,24 @@ public class XgfUserServiceImpl implements XgfUserService {

    @Override
    public void repulse(PageData flows, XgfFlowDto info) throws Exception {
        // add by liu jun: the related party can send the flow back to a specified node; -1 sends it back to the related-party side by default
        if ("-1".equals(info.getBACK_STEP())){
            PageData condition = new PageData();
            condition.put("XGF_USER_ID", flows.getString("FLOWS_ID"));
            PageData entity = xgfUserMapper.findById(condition);
            entity.put("STATUS", "0");
            entity.put("VALID_FLAG", "0");
            entity.put("CHECK_STATUS", "-2");
            PageData key = new PageData();
            key.putAll(entity);
            key.put("USER_ID", condition.get("XGF_USER_ID"));
            key.put("STATUS", "1");
            key.put("OPINION", flows.get("OPINION"));
            Map result = HttpClientService.doPost(prevention_xgf_url + "openApi/user/approve", key);
            if (result == null || !"succeed".equals(result.get("result"))) {
                throw new RuntimeException("请求失败");
            }
            xgfUserMapper.edit(entity);
            this.clearInfo(flows);
            xgfFlowsMapper.edit(flows);
        } else {
            // 1. A node must not send the flow back to itself
            PageData condition = new PageData();
            condition.put("FLOWS_ID",flows.getString("FLOWS_ID"));
            PageData flowEntity = xgfFlowsMapper.findById(condition);
            if (flowEntity != null && flowEntity.size() > 0 && flowEntity.getString("FLOWS_STEP").equals(info.getBACK_STEP())){
                throw new RuntimeException("不能自己打回到自己");
            }
            flowEntity.put("FLOWS_STEP", info.getBACK_STEP());
            xgfFlowsMapper.edit(flowEntity);
            PageData condition = new PageData();
            condition.put("XGF_USER_ID", flows.getString("FLOWS_ID"));
            PageData entity = xgfUserMapper.findById(condition);
            entity.put("STATUS", "0");
            entity.put("VALID_FLAG", "0");
            entity.put("CHECK_STATUS", "-2");
            PageData key = new PageData();
            key.putAll(entity);
            key.put("USER_ID", condition.get("XGF_USER_ID"));
            key.put("STATUS", "1");
            key.put("OPINION", flows.get("OPINION"));
            Map result = HttpClientService.doPost(prevention_xgf_url + "openApi/user/approve", key);
            if (result == null || !"succeed".equals(result.get("result"))) {
                throw new RuntimeException("请求失败");
            }
            xgfUserMapper.edit(entity);
            this.clearInfo(flows);
            xgfFlowsMapper.edit(flows);
        }

    @Override
@@ -100,6 +100,7 @@ http.file.url=http://192.168.192.201:8991/file/

# Hebei Qin'an file server prefix
heBeiQinAnFile=https://file.zcloudchina.com/YTHFile
biaoZhunShuJuKu=https://file.zcloudchina.com/

liteflow.rule-source=flow.xml
liteflow.print-execution-log=false
@@ -32,7 +32,14 @@
        f.STATUS,
        f.BUS_TEXT_LIBRARY_ID,
        f.MIGRATION_FLAG,
        f.CORPINFO_ID
        f.CORPINFO_ID,
        f.CODE_NAME,
        f.PUBLISHES,
        f.TEXT_NAME,
        f.PUBLICATION_DATE,
        f.IMPLEMENTATION_DATE,
        f.TYPE_TWO,
        f.TYPE_TWO_NAME
    </sql>

    <!-- Columns used for insert -->
@@ -54,8 +61,15 @@
        ASSOCIATION,
        STATUS,
        BUS_TEXT_LIBRARY_ID,
        MIGRATION_FLAG,
        CORPINFO_ID,
        MIGRATION_FLAG
        CODE_NAME,
        PUBLISHES,
        TEXT_NAME,
        PUBLICATION_DATE,
        IMPLEMENTATION_DATE,
        TYPE_TWO,
        TYPE_TWO_NAME
    </sql>

    <!-- Column values -->
@@ -77,8 +91,15 @@
        #{ASSOCIATION},
        #{STATUS},
        #{BUS_TEXT_LIBRARY_ID},
        #{MIGRATION_FLAG},
        #{CORPINFO_ID},
        #{MIGRATION_FLAG}
        #{CODE_NAME},
        #{PUBLISHES},
        #{TEXT_NAME},
        #{PUBLICATION_DATE},
        #{IMPLEMENTATION_DATE},
        #{TYPE_TWO},
        #{TYPE_TWO_NAME}
    </sql>

    <!-- Insert -->
@@ -124,6 +145,13 @@
        STATUS = #{STATUS},
        CORPINFO_ID = #{CORPINFO_ID},
        MIGRATION_FLAG = #{MIGRATION_FLAG},
        CODE_NAME = #{CODE_NAME},
        PUBLISHES = #{PUBLISHES},
        TEXT_NAME = #{TEXT_NAME},
        PUBLICATION_DATE = #{PUBLICATION_DATE},
        IMPLEMENTATION_DATE = #{IMPLEMENTATION_DATE},
        TYPE_TWO = #{TYPE_TWO},
        TYPE_TWO_NAME = #{TYPE_TWO_NAME},
        BUS_TEXT_LIBRARY_ID = BUS_TEXT_LIBRARY_ID
    where
        BUS_TEXT_LIBRARY_ID = #{BUS_TEXT_LIBRARY_ID}
@@ -152,6 +180,9 @@
        <if test="pd.KEYWORDS != null and pd.KEYWORDS != ''"><!-- keyword search -->
            and f.REMARKS LIKE CONCAT(CONCAT('%', #{pd.KEYWORDS}),'%')
        </if>
        <if test="pd.KEYWORD != null and pd.KEYWORD != ''"><!-- keyword search -->
            and (f.TYPE_NAME LIKE CONCAT(CONCAT('%', #{pd.KEYWORD}),'%') or f.TYPE_TWO_NAME LIKE CONCAT(CONCAT('%', #{pd.KEYWORD}),'%'))
        </if>
        <if test="pd.LABELS != null and pd.LABELS.length > 0">
            and exists(select 1 from bus_library_labels b where b.BUS_TEXT_LIBRARY_ID = f.BUS_TEXT_LIBRARY_ID and
            b.BUS_LABEL_FACTORY_ID in
@@ -184,6 +215,15 @@
        <if test="pd.LIBRARY_FLAG != null and pd.LIBRARY_FLAG != ''">
            and (f.MIGRATION_FLAG = '0' or f.MIGRATION_FLAG is null)
        </if>
        <if test="pd.TYPE != null and pd.TYPE != ''">
            and f.TYPE = #{pd.TYPE}
        </if>
        <if test="pd.TYPE_TWO != null and pd.TYPE_TWO != ''">
            and f.TYPE_TWO like CONCAT('%', #{pd.TYPE_TWO}, '%')
        </if>
        <if test="pd.TEXT_NAME != null and pd.TEXT_NAME != ''">
            and f.TEXT_NAME like CONCAT('%', #{pd.TEXT_NAME}, '%')
        </if>
        order by f.ISTOPTIME desc,f.CREATED_TIME desc
    </select>
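The new <if> blocks only add a predicate when the corresponding key is present and non-empty in pd. Below is a small self-contained sketch of that contract, with a plain Map standing in for PageData and ? standing in for the bound #{...} parameter; buildWhere is hypothetical and not part of the mapper:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.StringJoiner;

    public class DynamicFilterSketch {

        // Same rule as <if test="pd.X != null and pd.X != ''">: a filter is applied
        // only when the key exists and its value is a non-empty string.
        static String buildWhere(Map<String, Object> pd) {
            StringJoiner where = new StringJoiner(" ");
            if (notEmpty(pd.get("TYPE")))      where.add("and f.TYPE = ?");
            if (notEmpty(pd.get("TYPE_TWO")))  where.add("and f.TYPE_TWO like CONCAT('%', ?, '%')");
            if (notEmpty(pd.get("TEXT_NAME"))) where.add("and f.TEXT_NAME like CONCAT('%', ?, '%')");
            return where.toString();
        }

        static boolean notEmpty(Object v) {
            return v != null && !"".equals(v);
        }

        public static void main(String[] args) {
            Map<String, Object> pd = new HashMap<>();
            pd.put("TEXT_NAME", "安全");
            System.out.println(buildWhere(pd)); // and f.TEXT_NAME like CONCAT('%', ?, '%')
        }
    }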