package com.dderp.business.service; import com.dderp.business.dao.*; import com.dderp.common.api.*; import com.dderp.common.base.BaseService; import com.dderp.common.datas.ERPModule; import com.dderp.common.datas.ESKeys; import com.dderp.common.datas.HttpCode; import com.dderp.common.datas.RedisKeys; import com.dderp.common.entity.base.BusinessBookPeriod; import com.dderp.common.entity.base.DataBaseMultiItemEx; import com.dderp.common.entity.base.ProcessMapItem; import com.dderp.common.entity.doc.BusinessScript; import com.dderp.common.entity.site.ERPTokenUser; import com.dderp.common.entity.system.ConfigValue; import com.dderp.common.entity.system.KeyValuePair; import com.dderp.common.tool.ERPUtils; import com.dderp.common.tool.ParticularTimeTasker; import com.dySweetFishPlugin.elasticsearch.ESClient; import com.dySweetFishPlugin.redis.RedisService; import com.dySweetFishPlugin.sql.DBService; import com.dySweetFishPlugin.sql.DataBaseMultiItem; import com.dySweetFishPlugin.sql.RMapUtils; import com.dySweetFishPlugin.sql.TableIdService; import com.dySweetFishPlugin.sql.dao.TunaService; import com.dySweetFishPlugin.tool.lang.ThreadFactoryWithNamePrefix; import com.sweetfish.convert.json.JsonConvert; import com.sweetfish.service.Local; import com.sweetfish.service.RetResult; import com.sweetfish.util.AnyValue; import com.sweetfish.util.AutoLoad; import com.sweetfish.util.ResourceType; import com.sweetfish.util.Utility; import groovy.lang.Binding; import groovy.util.GroovyScriptEngine; import groovy.util.ResourceException; import groovy.util.ScriptException; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang3.StringUtils; import org.beetl.core.Configuration; import org.beetl.core.GroupTemplate; import org.beetl.core.Template; import org.beetl.core.resource.FileResourceLoader; import org.rex.DB; import org.rex.RMap; import org.rex.db.exception.DBException; import javax.annotation.Priority; import javax.annotation.Resource; import java.io.File; 
import java.io.IOException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.*;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;

/**
 * Created by 81460 on 2020-03-23
 * Site/supplier initialization service: loads the per-supplier shard databases,
 * runs SQL/Groovy initialization scripts and schedules recurring script tasks.
 *
 * @author 81460
 */
@AutoLoad(false)
@Local
@ResourceType(SupplierInitService.class)
@Priority(7999)
public class SupplierInitImpl extends BaseService implements SupplierInitService {

    @Resource
    ESClient esClient;
    @Resource
    TunaService tunaService;
    @Resource
    TableIdService tableIdService;
    @Resource
    RedisService redisService;
    @Resource
    JsonConvert jsonConvert;
    @Resource
    DBService dbService;
    @Resource
    NoSqlKeysService keysService;
    @Resource
    SystemService systemService;
    @Resource
    PermissionService permissionService;
    @Resource
    ConfigService configService;
    @Resource(name = "APP_HOME")
    private String appHome;
    @Resource(name = "property.sysRunMode")
    private String sysRunMode;
    @Resource(name = "property.bookStartYear")
    private int bookStartYear;
    @Resource(name = "property.bookEndYear")
    private int bookEndYear;
    @Resource(name = "property.bookSplitKind")
    private int bookSplitKind;
    @Resource(name = "property.fileDownloadUrl")
    private String downloadUrl;
    @Resource(name = "property.outFileDownloadUrl")
    private String outDownloadUrl;

    // Daos are generated in start() via tunaService and handed to the Groovy scripts.
    private DocDao docDao;
    private SystemDao systemDao;
    private PlatformDao platformDao;
    private StoreDao storeDao;
    private OrderDao orderDao;

    // Pool that runs the per-supplier Init_* scripts concurrently; shut down in destroy().
    private ExecutorService initInfoExecutor;
    // One single-thread scheduler per Schedule_* script; shut down in destroy().
    // Generic type parameters restored: destroy() iterates with ScheduledThreadPoolExecutor::shutdown.
    private final List<ScheduledThreadPoolExecutor> scheduleThreadList = new ArrayList<>();
    // Timers created for Time_* scripts; stopped in destroy().
    private final List<ParticularTimeTasker> particularTimeTaskList = new ArrayList<>();
    // Cache: "supplierCode_esKey" -> per-year ES index names (see getDateYearESIndexArray).
    private final Map<String, String[]> esIndexMap = new ConcurrentHashMap<>();
    // dataBaseAlias -> shardingKey (supplierCode), loaded from sys_DataBaseMultiItem.
    private final ConcurrentHashMap<String, String> sysDataBaseItem = new ConcurrentHashMap<>();
    // SHA returned by scriptLoad for conf/redisscript/lget_ldel.lua (list get-and-delete).
    private String redisHashListGet;
private String runEnvironment; private boolean resetClientInfo; @Override public void init(AnyValue config) { initInfoExecutor = Executors.newFixedThreadPool(4, new ThreadFactoryWithNamePrefix("[初始化线程池]")); //数据库更新 boolean master = false; AnyValue masterValue = config.getAnyValue("master"); if (masterValue != null) { master = masterValue.getBoolValue("value"); } AnyValue resetClientValue = config.getAnyValue("resetClientInfo"); if (resetClientValue != null) { resetClientInfo = resetClientValue.getBoolValue("value"); } else { resetClientInfo = true; } try { List dataList = DB.getList("select * from sys_DataBaseMultiItem where voidFlag = 0 order by id", DataBaseMultiItemEx.class); dataList.forEach((x) -> sysDataBaseItem.put(x.getDataBaseAlias(), x.getShardingKey())); if (master) { //读取数据源,进行初始化操作 //region 更新sql语句 //一定要注意:更新的sql语句开发者应该在sql更新目录下保存一份,因为正式执行在sqlupdate目录中,执行后会删除文件 Path sqlPath = Paths.get(appHome + File.separator + "conf" + File.separator + "sqlupdate"); if (Files.exists(sqlPath) && (Files.isDirectory(sqlPath))) { try (Stream paths = Files.walk(sqlPath, 1).filter(Files::isRegularFile)) { FileResourceLoader resourceLoader = new FileResourceLoader(appHome + File.separator + "conf" + File.separator + "sqlupdate", "utf-8"); Configuration cfg = Configuration.defaultConfiguration(); GroupTemplate gt = new GroupTemplate(resourceLoader, cfg); paths.forEach((x) -> { Template t = gt.getTemplate(FilenameUtils.getBaseName(x.toFile().getName()) + "." 
+ FilenameUtils.getExtension(x.toFile().getName())); dataList.forEach((d) -> { logger.info(d.getDataBaseAlias() + "执行系统初始化sql"); t.binding("supplyCode", d.getShardingKey()); String content = t.render(); if (StringUtils.isNotEmpty(content)) { try { content = ERPUtils.replaceEscape(content); dbService.batchUpdate(d.getDataBaseAlias(), content.split(";")); logger.info(d.getDataBaseAlias() + "执行初始化sql成功"); } catch (DBException e) { logger.error("执行初始化sql失败: " + e.getMessage(), e); } } }); try { Files.deleteIfExists(x); } catch (IOException e) { logger.error("执行初始化sql失败: " + e.getMessage(), e); } }); resourceLoader.close(); } catch (IOException e) { logger.error("执行初始化sql失败: " + e.getMessage(), e); } } //endregion //region 更新es //es的更新有些复杂,没法用文件名表示,暂定为用rest服务手动更新 //endregion } } catch (DBException e) { logger.error("执行sql出错: " + e.getMessage(), e); } } @Override public void start(AnyValue config) { systemDao = tunaService.generate(SystemDao.class); docDao = tunaService.generate(DocDao.class); platformDao = tunaService.generate(PlatformDao.class); storeDao = tunaService.generate(StoreDao.class); orderDao = tunaService.generate(OrderDao.class); AnyValue environmentValue = config.getAnyValue("environment"); if (environmentValue != null) { runEnvironment = environmentValue.getValue("value"); } else { runEnvironment = "dev"; } boolean master = false; AnyValue masterValue = config.getAnyValue("master"); if (masterValue != null) { master = masterValue.getBoolValue("value"); } if (master) { Path path = Paths.get(appHome + File.separator + "conf" + File.separator + "redisscript" + File.separator + "lget_ldel.lua"); this.redisHashListGet = redisService.scriptLoad(readUTF8File(path, false)); //读取数据源,进行初始化操作 try { //把所有分库写入到redis中 redisService.del(keysService.getRedisKey(RedisKeys.KEY_SUPPLIERPLATFORM)); redisService.del(keysService.getRedisKey(RedisKeys.KEY_SUPPLIERPCODELATFORM)); List dataList = DB.getList("select * from sys_DataBaseMultiItem where voidFlag = 0 order by id", 
DataBaseMultiItemEx.class); if (!dataList.isEmpty()) { //检查数据有效性 for (DataBaseMultiItemEx item : dataList) { long supplierCode = Long.parseLong(item.getShardingKey()); if ((supplierCode <= 0L) || (supplierCode > 9999999L)) { //加入此条件的限制主要用来对supplierCode进行62进制编码 //因为有的在线支付在异步通知中不传递attach参数,为了统一管理,对商户订单号头四位进行处理, //实际是不能大于62^4=14,776,336的,为方便记忆,1千万足够用了 throw new RuntimeException("商户分厂的分表id超出范围,不能小于0或者大于9999999"); } } //写入到redis,方便前端获取 Map supplierClientMap = dataList.stream().collect( Collectors.toMap(x -> String.valueOf(x.getId()), x -> jsonConvert.convertTo(x)) ); Map supplierCodeClientMap = dataList.stream().collect( Collectors.toMap(DataBaseMultiItem::getShardingKey, x -> jsonConvert.convertTo(x)) ); if (!supplierClientMap.isEmpty()) { redisService.hmset(keysService.getRedisKey(RedisKeys.KEY_SUPPLIERPLATFORM), supplierClientMap); } if (!supplierCodeClientMap.isEmpty()) { redisService.hmset(keysService.getRedisKey(RedisKeys.KEY_SUPPLIERPCODELATFORM), supplierCodeClientMap); } } dataList.parallelStream().forEach((x) -> initSupplier(x.getDataBaseAlias(), Long.parseLong(x.getShardingKey()), x.getItemType())); } catch (DBException e) { logger.error(e.getMessage(), e); } } //操作日志,写一个key为0的,某些属于平台操作,无sullier key esClient.checkIndexEx(ESKeys.ESOPLOG_INDEX + "_0", ESKeys.INDEX_CONFIG, Utility.ofMap(ESKeys.ESERPDEFAULT_TYPE, "oplog.json")); } @Override public void destroy(AnyValue config) { scheduleThreadList.forEach(ScheduledThreadPoolExecutor::shutdown); if (initInfoExecutor != null) { initInfoExecutor.shutdown(); } particularTimeTaskList.forEach(ParticularTimeTasker::stop); } @SuppressWarnings("unchecked") public List redisListGetAndDel(String key, int range) { Object eval = redisService.evalsha(this.redisHashListGet, 1, key, String.valueOf(range)); return (List) eval; } /** * 商户初始化 * 2024-01-01 tt修改为按脚本执行,避免每个业务都来一遍 * 脚本规范:脚本按文件名顺序执行,所以文件名前面以 执行类型_顺序(数字)_ 开头,如果文件名以Ignore_开头(用于放置公共方法),则不执行 * 执行脚本传入所有的参数binding,当前还是固定的,原因见注释,编写规范见10_Inquire.groovy脚本(用的参数多可查看实现方式) * 
最简单读数据库写redis的代码参考Init_19_ProductType.groovy,最简单写ES的代码参考Init_20_ProductImage.groovy * tt:我们的业务需求当前大概就碰到四种初始化脚本 * 1、查数据库初始化redis或者es,文件名为Init_开头 * 2、每隔多少分钟干活,可以抽象为间隔多少秒处理数据,文件名为 Schedule_序号_第一次执行秒_间隔秒_xxx * 3、定时任务,当前遇到的业务基本集中在按天,按月固定时间执行,文件名为Time_开头,具体见代码注释 * 其它的暂时没碰到,所以代码没必要写的像Quartz那样太通用 * 4、同步执行的业务,可能是创建目录之类的操作,以Run_开头执行算了(这类脚本看之前的代码理论上可以放在第一种任务中执行,为兼容加一种) * * @param dataSourceId 分库 * @param supplierCode 分表 * @param supplierType 商户类型 */ private void initSupplier(String dataSourceId, long supplierCode, int supplierType) { // 指定Groovy脚本的位置 String initScriptRoot = appHome + File.separator + "conf" + File.separator + "script" + File.separator + supplierCode + File.separator + "init"; //获取脚本目录下的所有文件,并按照文件名排序 File dir = new File(initScriptRoot); // 创建一个新的Binding对象,并设置变量 Binding binding = new Binding(Utility.ofMap( "redisService", redisService, "esClient", esClient, "keysService", keysService, "systemService", systemService, "permissionService", permissionService, "tableIdService", tableIdService, "supplierService", this, //其它参数都好说,就这一堆dao,如果放在脚本里面初始化,没测试过,脚本里面初始化了,其它Service在初始化能否正常使用, //暂时先放在java里面初始化dao并都传递过去 "systemDao", systemDao, "docDao", docDao, "platformDao", platformDao, "storeDao", storeDao, "orderDao", orderDao, "jsonConvert", jsonConvert, "dataSourceId", dataSourceId, "supplierCode", supplierCode, "runEnvironment", runEnvironment, "resetClientInfo", resetClientInfo, "sysRunMode", sysRunMode, "appHome", appHome, "logger", logger )); File[] scriptFiles = dir.listFiles(); if (scriptFiles != null) { GroovyScriptEngine gse; try { gse = new GroovyScriptEngine(initScriptRoot, this.getClass().getClassLoader()); //region 0、语言扩展方法,先执行,比如扩展一些Groovy方法 //SupplierInitImpl是不允许运行脚本的,因为它本身需要初始化脚本代码,所以这里执行扩展主要是为了下面的初始化脚本使用 //而业务的脚本代码如果需要Groovy扩展方法,则由BaseService启动执行即可 File[] firstFiles = Arrays.stream(scriptFiles) .filter(file -> file.getName().startsWith("First_")) .toArray(File[]::new); //按前面的数字排下序,防止有些需要先执行 Arrays.sort(firstFiles, Comparator.comparingInt(f -> 
Integer.parseInt(f.getName().split("_")[1]))); for (File file : firstFiles) { gse.run(file.getName(), binding); } //endregion //region 1、初始化脚本,先处理,因为要阻塞等待执行完成 //过滤掉Ignore_开头的文件,文件为公共方法 File[] initFiles = Arrays.stream(scriptFiles) .filter(file -> file.getName().startsWith("Init_")) .toArray(File[]::new); List> futureList = new ArrayList<>(); //按前面的数字排下序,防止有些需要先执行 Arrays.sort(initFiles, Comparator.comparingInt(f -> Integer.parseInt(f.getName().split("_")[1]))); for (File file : initFiles) { //加载并执行Groovy脚本,获取返回的Runnable对象,这里如果用createScript,然后调用invokeMethod需要传递两次参数,并且invokeMethod传递参数还是一个固定的, Runnable runnable = (Runnable) gse.run(file.getName(), binding); if (runnable != null) { //将Runnable对象添加到线程池 futureList.add(CompletableFuture.runAsync(runnable, initInfoExecutor)); } } CompletableFuture.allOf(futureList.toArray(new CompletableFuture[0])).join(); //endregion //region 2、间隔多少秒干活 //经过测试,groovy的好处就是Runnable里面出现异常了,还能继续下个间隔干活 File[] scheduleFiles = Arrays.stream(scriptFiles) .filter(file -> file.getName().startsWith("Schedule_")) .toArray(File[]::new); //按前面的数字排下序,防止有些需要先执行 Arrays.sort(scheduleFiles, Comparator.comparingInt(f -> Integer.parseInt(f.getName().split("_")[1]))); for (File file : scheduleFiles) { //加载并执行Groovy脚本,获取返回的Runnable对象 Runnable runnable = (Runnable) gse.run(file.getName(), binding); ScheduledThreadPoolExecutor cycleExecutor = new ScheduledThreadPoolExecutor(1, new ThreadFactoryWithNamePrefix("[周期任务线程池" + FilenameUtils.removeExtension(file.getName()) + supplierCode + "]")); long initialDelay = Integer.parseInt(file.getName().split("_")[2]); long delay = Integer.parseInt(file.getName().split("_")[3]); cycleExecutor.scheduleWithFixedDelay(runnable, initialDelay, delay, TimeUnit.SECONDS); scheduleThreadList.add(cycleExecutor); } //endregion //region 3、定时处理业务 File[] timeFiles = Arrays.stream(scriptFiles) .filter(file -> file.getName().startsWith("Time_")) .toArray(File[]::new); //按前面的数字排下序,防止有些需要先执行 Arrays.sort(timeFiles, Comparator.comparingInt(f -> 
Integer.parseInt(f.getName().split("_")[1]))); for (File file : timeFiles) { //加载并执行Groovy脚本,获取返回的Runnable对象 Runnable runnable = (Runnable) gse.run(file.getName(), binding); //完整的文件名 Time_1(序号)_D/M(按天、按月执行)_12(日期,12号,按天时为0)_15(小时)_00(分钟)_00(秒)_脚本说明.groovy String[] timeArray = file.getName().split("_"); String timeKind = timeArray[2];//执行类型 D按天 M按月 int day = Integer.parseInt(timeArray[3]);//日期,按天执行为统一为0即可 int hour = Integer.parseInt(timeArray[4]);//小时 int minutes = Integer.parseInt(timeArray[5]); int seconds = Integer.parseInt(timeArray[6]); ParticularTimeTasker timeTasker = new ParticularTimeTasker( runnable, () -> { //计算下一次执行的时间,此处代码应该简化一下,暂时为了验证和测试,先分开写 //当前时间 LocalDateTime now = LocalDateTime.now(); if (StringUtils.equalsIgnoreCase("D", timeKind)) { LocalDateTime nextExecutionDate = LocalDateTime.of(now.getYear(), now.getMonth(), now.getDayOfMonth(), hour, minutes, seconds); if (now.isAfter(nextExecutionDate)) { //如果当前时间已经过了今天的hour点minutes分seconds秒,那么任务的开始时间设置为明天的同一时间 nextExecutionDate = nextExecutionDate.plusDays(1); } logger.info("按天执行:下一次执行时间" + nextExecutionDate.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); Duration duration = Duration.between(now, nextExecutionDate); //此处不加1秒有问题,体现在如果脚本里面的执行特别快,duration.toSeconds()不是很精确,导致会在预定时间之前(大概200毫秒,一般不超过1秒)执行完成,然后由于时间还没到,会多次执行,所以统一加1秒 //主要是这里+1秒会不会导致每次都顺延多一秒钟,没法测试:(,但理论上应该是拿下一次执行的时间计算的,应该不会错 return duration.toSeconds() + 1; } else if (StringUtils.equalsIgnoreCase("M", timeKind)) { //每个月的day日hour点minutes分seconds秒执行 LocalDateTime nextExecutionDate = LocalDateTime.of(now.getYear(), now.getMonth(), day, hour, minutes, seconds); if (now.isAfter(nextExecutionDate)) { nextExecutionDate = nextExecutionDate.plusMonths(1); } logger.info("按月执行:下一次执行时间" + nextExecutionDate.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); Duration duration = Duration.between(now, nextExecutionDate); //ParticularTimeTasker中的定义的按秒间隔执行,查看ParticularTimeTasker的start方法 return duration.toSeconds() + 1; } return -1L; }); 
timeTasker.start(); particularTimeTaskList.add(timeTasker); } //endregion //region 4、一次性执行的脚本,比如创建目录等无需在线程池中执行的方法 File[] onceFiles = Arrays.stream(scriptFiles) .filter(file -> file.getName().startsWith("Run_")) .toArray(File[]::new); //按前面的数字排下序,防止有些需要先执行 Arrays.sort(timeFiles, Comparator.comparingInt(f -> Integer.parseInt(f.getName().split("_")[1]))); for (File file : onceFiles) { gse.run(file.getName(), binding); } //endregion } catch (IOException | ScriptException | ResourceException e) { throw new RuntimeException(e); } } } /** * 增加业务脚本 * * @param businessScript 脚本 * @param dataSourceId 分库 * @param supplierCode 分表 */ public void addBusinessScriptLocal(BusinessScript businessScript, String dataSourceId, long supplierCode) { if (redisService.hexists(keysService.getRedisKey(RedisKeys.KEY_BUSINESSSCRIPT, supplierCode), businessScript.getBusinessCode())) { return; } businessScript.setId(tableIdService.getTableIdMulti("tbBusinessScript.id", 1, dataSourceId, String.valueOf(supplierCode))); businessScript.setVoidFlag(0); BusinessScript.create(businessScript, 0L); redisService.hset(keysService.getRedisKey(RedisKeys.KEY_BUSINESSSCRIPT, supplierCode), businessScript.getBusinessCode(), jsonConvert.convertTo(businessScript)); docDao.addBusinessScript(businessScript, dataSourceId, supplierCode); } /** * 增加商户 * * @param dataSourceId 数据库id * @param supplierCode 商户code,对应商户id即可 * @param supplierType 商户类别,如印刷厂,外协厂等 * @param driverClassName 数据库连接驱动名称 * @param dataConnStr 数据库连接字符串 * @param userName 数据库登录账号 * @param password 数据库密码 * @return 操作信息 */ @SuppressWarnings({"unchecked", "rawtypes"}) public RMap addSupplier(String dataSourceId, String supplierCode, int supplierType, String driverClassName, String dataConnStr, String userName, String password) { if (sysDataBaseItem.containsKey(dataSourceId)) { return RMapUtils.error(HttpCode.CONFLICT.value(), "数据源已经存在"); } sysDataBaseItem.put(dataSourceId, supplierCode); RMap dataParams = new RMap(); dataParams.put("dataBaseAlias", 
dataSourceId); dataParams.put("shardingKey", supplierCode); dataParams.put("supplierType", supplierType); try { //写入主表 DB.update("insert into sys_DataBaseMultiItem (dataBaseAlias, shardingKey, itemType) values(#{dataBaseAlias}, #{shardingKey}, #{supplierType})", dataParams); //增加数据源 //私自注销 dbService.addDataSource(dataSourceId, driverClassName, dataConnStr, userName, password); //初始化 initSupplier(dataSourceId, Long.parseLong(supplierCode), supplierType); } catch (DBException e) { logger.error(e.getMessage(), e); } return RMapUtils.success(); } @SuppressWarnings({"rawtypes", "unused"}) public RMap updateSupplierDataBase() { //todo return null; } /** * 获取某个supplier对应的es全部索引 * * @param supplierCode 分表 * @param esKey key * @return key */ public String[] getDateYearESIndexArray(long supplierCode, String esKey) { if (bookSplitKind == 1) { return new String[]{keysService.getESKey(esKey, supplierCode) + "_current"}; } else { return esIndexMap.computeIfAbsent(supplierCode + "_" + esKey, (k) -> IntStream.rangeClosed(bookStartYear, bookEndYear).mapToObj((x) -> keysService.getESKey(esKey, supplierCode) + "_" + x).toArray(String[]::new)); } } /** * 获取某个supplier对应的es单个年份索引 * * @param supplierCode 分表 * @param esKey key * @param dataYear 年份 * @return es索引 */ public String getDateYearESIndex(long supplierCode, String esKey, int dataYear) { if (bookSplitKind == 1) { return keysService.getESKey(esKey, supplierCode) + "_current"; } else { return keysService.getESKey(esKey, supplierCode) + "_" + dataYear; } } /** * 获取账套列表,维护使用 * * @return KeyValuePair的key为dataSourceId,value为supplierCode */ public List getDataBaseItemList() { return sysDataBaseItem.entrySet().stream().map((x) -> new KeyValuePair(x.getKey(), x.getValue())).collect(Collectors.toList()); } /** * 从supplierCode获取数据源 * 分表一般是唯一的 * * @param supplierCode 分表 * @return dataSourceId */ public String getDataSourceIdFromSupplierCode(long supplierCode) { return sysDataBaseItem.entrySet().stream() .filter(entry -> 
entry.getValue().equalsIgnoreCase(String.valueOf(supplierCode))) .map(Map.Entry::getKey) .findFirst() .orElse(""); } /** * 业务数据结转 * 结转主要过程为 * 1、查找上一期结束时间 * 2、根据当期结束时间 * 3、创建对应的业务成套数据表 * 4、数据库中在新表中插入数据,并在旧表中删除数据(有个问题需考虑,实际测试mysql的delete速度非常慢,需要解决) * 5、从es中删除结转期对应的业务数据 * * @param dataSourceId 分库 * @param supplierCode 分表 * @return 操作信息 */ @SuppressWarnings({"rawtypes", "unchecked", "unused"}) public RetResult closedPeriodSupplier(String closedDateTime, ERPTokenUser currentUser, String dataSourceId, long supplierCode) { return handleScript("ClosedPeriod", ERPModule.SYSTEM, dataSourceId, supplierCode, () -> { RMap params = new RMap(); params.put("closedDateTime", closedDateTime); return ProcessMapItem.newBuilder() .itemData(params) .currentUser(currentUser) .dataSourceId(dataSourceId) .supplierCode(supplierCode) .build(); }); } /** * 获取下载根地址 * * @param dataSourceId 分库 * @param supplierCode 分表 * @return 下载根目录地址 */ public String getDownloadRootUrl(String dataSourceId, long supplierCode) { ConfigValue downUrlConfig = configService.getRedisConfigValue("OrderDownloadServer", dataSourceId, supplierCode); return (downUrlConfig == null) ? downloadUrl : downUrlConfig.getConfigValue1(); } public String getWebDownloadRootUrl(String dataSourceId, long supplierCode) { ConfigValue downUrlConfig = configService.getRedisConfigValue("OrderDownloadServer", dataSourceId, supplierCode); return (downUrlConfig == null) ? outDownloadUrl : downUrlConfig.getConfigValue2(); } /** * 获取文件内网下载地址 * 由于直接存入拼接好的下载地址不利于后期修改地址,而且存储字段过长,改为实时获取下载地址 * 下面链接未做跨域处理,如需跨域,可参考 ... 
* @param bucketFileName 文件名,包含目录,比如 productImage/abc.jpg * @param srcFileName 原文件名,比如订单的文件名,用于下载文件时可以更方便的命名 * @param dataSourceId 分库 * @param supplierCode 分表 * @return 下载地址 */ public String getFileDownloadUrl(String bucketFileName, String srcFileName, String dataSourceId, long supplierCode) { if (StringUtils.isEmpty(bucketFileName)) { return ""; } ConfigValue downUrlConfig = configService.getRedisConfigValue("OrderDownloadServer", dataSourceId, supplierCode); String httpUrl = (downUrlConfig == null) ? downloadUrl : downUrlConfig.getConfigValue1(); String url = httpUrl + supplierCode + "/" + bucketFileName; if (StringUtils.isNotBlank(srcFileName)) { url = url + "?filename=" + URLEncoder.encode(srcFileName, StandardCharsets.UTF_8).replaceAll("\\+", "%20"); } return url; } /** * 获取文件外网下载地址 * 由于直接存入拼接好的下载地址不利于后期修改地址,而且存储字段过长,改为实时获取下载地址 * * @param bucketFileName 文件名,包含目录,比如 productImage/abc.jpg * @param srcFileName 原文件名,比如订单的文件名,用于下载文件时可以更方便的命名 * @param dataSourceId 分库 * @param supplierCode 分表 * @return 下载地址 */ public String getFileWebDownloadUrl(String bucketFileName, String srcFileName, String dataSourceId, long supplierCode) { if (StringUtils.isEmpty(bucketFileName)) { return ""; } ConfigValue downUrlConfig = configService.getRedisConfigValue("OrderDownloadServer", dataSourceId, supplierCode); String httpUrl = (downUrlConfig == null) ? downloadUrl : downUrlConfig.getConfigValue2(); String url = httpUrl + supplierCode + "/" + bucketFileName; if (StringUtils.isNotBlank(srcFileName)) { url = url + "?filename=" + URLEncoder.encode(srcFileName, StandardCharsets.UTF_8).replaceAll("\\+", "%20"); } return url; } }