Implement HDFS file upload (known minor issue: HADOOP_USER_NAME set in the configuration class does not take effect)

lijie, 3 years ago
parent commit aa1499f92d
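The HADOOP_USER_NAME issue noted in the commit message is a known Hadoop behavior: UserGroupInformation resolves the login user once, at first use, so exporting the variable from a Spring configuration class usually happens too late. Either set it as a system property before any HDFS class loads, or pass the user explicitly when opening the FileSystem. A minimal sketch of the explicit-user workaround, assuming the user name would come from the existing persagy.common.file.accessKey property; the class is illustrative, not part of this commit:

import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public final class HdfsLoginWorkaround {

    private HdfsLoginWorkaround() {}

    /**
     * Opens a FileSystem as an explicit user instead of relying on the
     * HADOOP_USER_NAME variable, which UserGroupInformation reads only once.
     */
    public static FileSystem open(Configuration conf, String url, String user)
            throws IOException, InterruptedException {
        // FileSystem.get(URI, Configuration, String) performs the remote-user
        // login internally, so no environment variable is required.
        return FileSystem.get(URI.create(url), conf, user);
    }
}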

+ 0 - 51
dmp-cloud/dmp-file/pom.xml

@@ -16,70 +16,19 @@
         <dependency>
             <groupId>com.persagy</groupId>
             <artifactId>integrated-config-client</artifactId>
-            <exclusions>
-                <exclusion>
-                    <artifactId>slf4j-api</artifactId>
-                    <groupId>org.slf4j</groupId>
-                </exclusion>
-            </exclusions>
         </dependency>
        <!-- Application startup -->
         <dependency>
             <groupId>com.persagy</groupId>
             <artifactId>dmp-server</artifactId>
-            <exclusions>
-                <exclusion>
-                    <artifactId>slf4j-api</artifactId>
-                    <groupId>org.slf4j</groupId>
-                </exclusion>
-            </exclusions>
         </dependency>
         <dependency>
             <groupId>com.persagy</groupId>
             <artifactId>dmp-mybatis</artifactId>
-            <exclusions>
-                <exclusion>
-                    <artifactId>slf4j-api</artifactId>
-                    <groupId>org.slf4j</groupId>
-                </exclusion>
-            </exclusions>
         </dependency>
         <dependency>
             <groupId>com.persagy</groupId>
             <artifactId>dmp-file-starter</artifactId>
-            <exclusions>
-                <exclusion>
-                    <artifactId>slf4j-api</artifactId>
-                    <groupId>org.slf4j</groupId>
-                </exclusion>
-            </exclusions>
         </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <version>2.5.1</version>
-            <exclusions>
-                <exclusion>
-                    <artifactId>slf4j-api</artifactId>
-                    <groupId>org.slf4j</groupId>
-                </exclusion>
-                <exclusion>
-                    <artifactId>log4j</artifactId>
-                    <groupId>log4j</groupId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-hdfs</artifactId>
-            <version>2.5.1</version>
-        </dependency>
-        <!-- https://mvnrepository.com/artifact/org.slf4j/log4j-over-slf4j -->
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>log4j-over-slf4j</artifactId>
-            <version>1.7.32</version>
-        </dependency>
-
     </dependencies>
 </project>

+ 44 - 0
dmp-cloud/dmp-file/src/main/java/com/persagy/dmp/file/controller/TestController.java

@@ -0,0 +1,44 @@
+package com.persagy.dmp.file.controller;
+
+import com.baomidou.mybatisplus.core.toolkit.IdWorker;
+import com.persagy.dmp.common.context.AppContext;
+import com.persagy.dmp.common.lang.PsDateTime;
+import com.persagy.dmp.common.model.response.CommonResult;
+import com.persagy.dmp.common.utils.ResultHelper;
+import com.persagy.dmp.file.model.FileInfo;
+import com.persagy.dmp.file.model.FileInfoCreator;
+import com.persagy.dmp.file.service.IFileService;
+import com.persagy.dmp.storage.service.FileStorageFactory;
+import com.persagy.dmp.storage.utils.FileStorageHelper;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.MediaType;
+import org.springframework.web.bind.annotation.*;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.io.IOException;
+
+/**
+ * File management test controller
+ * @author Charlie Yu
+ * @date 2021-05-15
+ */
+@RestController
+@RequestMapping("/test")
+public class TestController {
+
+    @Autowired
+    IFileService fileService;
+
+    @PostMapping(value = "/uploadFile")
+    @CrossOrigin
+    public CommonResult<FileInfo> uploadFile(@RequestParam("file") MultipartFile multipartFile) throws IOException {
+        String fileName = multipartFile.getResource().getFilename();
+        FileInfo fileInfo = FileInfoCreator.of(AppContext.getContext().getGroupCode(),
+                IdWorker.getIdStr(), "ADM", fileName);
+        fileInfo.setCreationTime(new PsDateTime());
+        // Additional fileInfo attributes, such as an expiry time, can be set here
+        fileService.insertFile(fileInfo);
+        FileStorageFactory.getService().upload(fileInfo.getFileBucket(), fileInfo.getFilePath(), multipartFile.getInputStream());
+        return ResultHelper.single(fileInfo);
+    }
+}
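For reference, the new endpoint accepts a standard multipart POST; a minimal client sketch using Spring's RestTemplate (host, port and file path are assumptions):

import org.springframework.core.io.FileSystemResource;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class UploadFileClient {

    public static void main(String[] args) {
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.MULTIPART_FORM_DATA);
        // The part name "file" must match @RequestParam("file") in TestController
        MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
        body.add("file", new FileSystemResource("/tmp/demo.txt"));
        ResponseEntity<String> response = new RestTemplate().postForEntity(
                "http://localhost:8080/test/uploadFile",
                new HttpEntity<>(body, headers), String.class);
        System.out.println(response.getBody());
    }
}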

+ 7 - 10
dmp-cloud/dmp-file/src/main/resources/bootstrap.yml

@@ -22,13 +22,10 @@ persagy:
       enabled: false
    # File storage server settings (MinIO or HDFS)
     file:
-      url: http://192.168.25.129:9000/
-      accessKey: admin
-      secretKey: 12345678
-  hdfs:
-    file:
-      accounts:
-        dev: 123
-        saas: 46f869eea8b31d14
-        superClass: 90b92e6f71b47b31
-        revit: 63afbef6906c342b
+      storage: 2
+#      url: http://192.168.25.129:9000/
+#      accessKey: admin
+#      secretKey: 12345678
+      url: hdfs://node1:8020
+      accessKey: saga
+      secretKey: 12345678

+ 0 - 25
dmp-cloud/dmp-file/src/main/resources/core-site.xml

@@ -34,29 +34,4 @@
         <value>10</value>
     </property>
 
-<property>
-	<name>hadoop.security.authentication</name>
-	<value>kerberos</value>
-</property> 
-<property>
-	<name>hadoop.security.authorization</name>
-	<value>true</value>
-</property>
-<property>
-	<name>hadoop.rpc.protection</name>
-	<value>authentication</value>
-</property>
-<property>
-	<name>hadoop.http.authentication.type</name>
-	<value>kerberos</value>
-</property>
-<property>
-	<name>hadoop.rpc.protection</name>
-	<value>privacy</value>
-</property>
-<property>
-	<name>hadoop.http.authentication.kerberos.keytab</name>
-	<value>/var/security/keytab/kerberos.keytab</value>
-</property>
-
 </configuration>

+ 0 - 56
dmp-cloud/dmp-file/src/main/resources/hdfs-site.xml

@@ -77,61 +77,5 @@
 		<name>dfs.data.transfer.protection</name>
 		<value>integrity</value>
 	</property>
-	
-	<property>
-		<name>dfs.namenode.keytab.file</name>
-		<value>/var/security/keytab/kerberos.keytab</value>
-	</property>
-	<property>
-		<name>dfs.namenode.kerberos.principal</name>
-		<value>hadoop/_HOST@HADOOP.COM</value>
-	</property>
-	<property>
-		<name>dfs.namenode.kerberos.internal.spnego.principal</name>
-		<value>HTTP/_HOST@HADOOP.COM</value>
-	</property>
-	<property>
-		<name>dfs.secondary.namenode.keytab.file</name>
-		<value>/var/security/keytab/kerberos.keytab</value>
-	</property>
-	<property>
-		<name>dfs.secondary.namenode.kerberos.principal</name>
-		<value>hadoop/_HOST@HADOOP.COM</value>
-	</property>
-	<property>
-		<name>dfs.secondary.namenode.kerberos.internal.spnego.principal</name>
-		<value>HTTP/_HOST@HADOOP.COM</value>
-	</property>
-	
-	<property>
-		<name>dfs.web.authentication.kerberos.keytab</name>
-		<value>/var/security/keytab/kerberos.keytab</value>
-	</property>
-	<property>
-		<name>dfs.web.authentication.kerberos.principal</name>
-		<value>HTTP/_HOST@HADOOP.COM</value>
-	</property>
-	
-	<property>
-		<name>dfs.datanode.keytab.file</name>
-		<value>/var/security/keytab/kerberos.keytab</value>
-	</property>	
-	<property>
-		<name>dfs.datanode.kerberos.principal</name>
-		<value>hadoop/_HOST@HADOOP.COM</value>
-	</property>
-	
-	<property>
-		<name>dfs.journalnode.keytab.file</name>
-		<value>/var/security/keytab/kerberos.keytab</value>
-	</property> 
-	<property>
-		<name>dfs.journalnode.kerberos.principal</name>
-		<value>hadoop/_HOST@HADOOP.COM</value>
-	</property> 
-	<property>
-		<name>dfs.journalnode.kerberos.internal.spnego.principal</name>
-		<value>${dfs.web.authentication.kerberos.principal}</value>
-	</property>
 
 </configuration>

+ 26 - 0
dmp-comp/dmp-file-starter/pom.xml

@@ -26,6 +26,32 @@
             <groupId>org.springframework.cloud</groupId>
             <artifactId>spring-cloud-starter-openfeign</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <version>2.5.1</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <version>2.5.1</version>
+        </dependency>
+        <!-- https://mvnrepository.com/artifact/org.slf4j/log4j-over-slf4j -->
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>log4j-over-slf4j</artifactId>
+            <version>1.7.32</version>
+        </dependency>
+        <dependency>
+            <groupId>com.baomidou</groupId>
+            <artifactId>mybatis-plus-annotation</artifactId>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.baomidou</groupId>
+            <artifactId>mybatis-plus-extension</artifactId>
+            <scope>compile</scope>
+        </dependency>
     </dependencies>
 
 </project>

+ 2 - 0
dmp-comp/dmp-file-starter/src/main/java/com/persagy/dmp/file/model/FileInfo.java

@@ -1,5 +1,6 @@
 package com.persagy.dmp.file.model;
 
+import com.baomidou.mybatisplus.annotation.TableName;
 import com.persagy.dmp.common.lang.PsDate;
 import com.persagy.dmp.common.model.entity.AuditableEntity;
 import lombok.Data;
@@ -12,6 +13,7 @@ import lombok.EqualsAndHashCode;
  */
 @Data
 @EqualsAndHashCode(callSuper = true)
+@TableName(value = "dt_file_info", autoResultMap = true)
 public class FileInfo extends AuditableEntity {
 
     /** Group code: tenant */

+ 136 - 0
dmp-comp/dmp-file-starter/src/main/java/com/persagy/dmp/storage/config/HdfsConfig.java

@@ -0,0 +1,136 @@
+package com.persagy.dmp.storage.config;
+
+import com.persagy.dmp.common.constant.ResponseCode;
+import com.persagy.dmp.common.exception.BusinessException;
+import com.persagy.dmp.storage.service.IFileStorageService;
+import com.persagy.dmp.storage.service.impl.HdfsStorageServiceImpl;
+import lombok.Data;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.core.io.Resource;
+import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
+import org.springframework.core.io.support.ResourcePatternResolver;
+
+import java.io.IOException;
+
+/**
+ * HDFS configuration
+ * @author Charlie Yu
+ * @date 2021-05-15
+ */
+@Configuration
+@ConfigurationProperties(prefix = "persagy.common.file")
+@Data
+@Slf4j
+public class HdfsConfig {
+
+    /** Location under the config directory next to the jar */
+    private static final String FILE_CONFIG_PATH="file:./config/";
+    /** Classpath location */
+    private static final String FILE_CLASSPATH_PATH="classpath:./";
+    /** hdfs-site.xml file name */
+    private static final String HDFS_SITE_FILE_NAME ="hdfs-site.xml";
+    /** core-site.xml file name */
+    private static final String HDFS_CORE_FILE_NAME ="core-site.xml";
+    /** HDFS access address key */
+    private static final String HDFS_URL_KEY="fs.defaultFS";
+    /** HDFS user name (setting it here does not take effect yet; see commit message) */
+    //private static final String HADOOP_USER_NAME="HADOOP_USER_NAME";
+
+    /** File server URL */
+    private String url;
+    /** File server accessKey */
+    private String accessKey;
+    /** File server secretKey */
+    private String secretKey;
+
+    @Bean
+    @ConditionalOnProperty(value = "persagy.common.file.storage", havingValue = "2")
+    public org.apache.hadoop.conf.Configuration plainConfig() {
+        org.apache.hadoop.conf.Configuration plainConfig = new org.apache.hadoop.conf.Configuration();
+        try {
+            String hdfsSiteConfigFilePath = FILE_CONFIG_PATH + HDFS_SITE_FILE_NAME;
+            String hdfsSiteClasspathFilePath = FILE_CLASSPATH_PATH + HDFS_SITE_FILE_NAME;
+            String coreSiteConfigFilePath = FILE_CONFIG_PATH + HDFS_CORE_FILE_NAME;
+            String coreSiteClasspathFilePath = FILE_CLASSPATH_PATH + HDFS_CORE_FILE_NAME;
+            log.info("hdfs-site.xml found? config dir: {}, classpath: {}", checkResourcesExist(hdfsSiteConfigFilePath),
+                    checkResourcesExist(hdfsSiteClasspathFilePath));
+            log.info("core-site.xml found? config dir: {}, classpath: {}", checkResourcesExist(coreSiteConfigFilePath),
+                    checkResourcesExist(coreSiteClasspathFilePath));
+            // Load hdfs-site.xml
+            Resource[] siteResources = getResource(hdfsSiteConfigFilePath, hdfsSiteClasspathFilePath);
+            if (siteResources.length < 1){
+                throw new BusinessException(ResponseCode.B0300.getCode(), "hdfs-site.xml not found");
+            }
+            // Load core-site.xml
+            Resource[] coreResources = getResource(coreSiteConfigFilePath, coreSiteClasspathFilePath);
+            if (coreResources.length < 1){
+                throw new BusinessException(ResponseCode.B0300.getCode(), "core-site.xml not found");
+            }
+            plainConfig.set(HDFS_URL_KEY, url);
+            plainConfig.addResource(siteResources[0].getInputStream());
+            plainConfig.addResource(coreResources[0].getInputStream());
+        } catch (Exception e) {
+            log.error("Failed to load HDFS configuration files!", e);
+        }
+        return plainConfig;
+    }
+
+    @Bean
+    @ConditionalOnProperty(value = "persagy.common.file.storage", havingValue = "2")
+    public IFileStorageService hdfsStorageServiceImpl() {
+        return new HdfsStorageServiceImpl(plainConfig());
+    }
+
+    /***
+     * @Description: checks whether a resource file exists
+     * @param configFilePath : config file path (external config dir or classpath)
+     * @return : boolean - true if at least one matching resource exists
+     * @author: lijie
+     * @Date:2020/6/6 19:43
+     * Update By lijie 2020/6/6 19:43
+     */
+    private static boolean checkResourcesExist(String configFilePath) throws IOException {
+        ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
+        Resource[] resolverResources = resolver.getResources(configFilePath);
+        return resolverResources.length > 0 && resolverResources[0].exists();
+    }
+
+    /***
+     * Description: resolves resource files, preferring the external config path over the classpath
+     * @param configFilePath : config file path next to the jar
+     * @param classpathFilePath : fallback file path on the classpath
+     * @return : org.springframework.core.io.Resource[]
+     * @author : lijie
+     * @date :2021/3/1 14:25
+     * Update By lijie 2021/3/1 14:25
+     */
+    public static Resource[] getResource(String configFilePath, String classpathFilePath)
+            throws IOException {
+        ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
+        Resource[] resolverResources = resolver.getResources(configFilePath);
+        if (resolverResources.length > 0) {
+            boolean exist = true;
+            for (Resource resource : resolverResources) {
+                if (!resource.exists()) {
+                    exist = false;
+                    break;
+                }
+            }
+            if(exist) {
+                return resolverResources;
+            }
+        }
+        return resolver.getResources(classpathFilePath);
+    }
+
+}
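A quick way to verify that both site files were actually loaded is to read known properties back from the plainConfig bean; a minimal sketch, assuming it runs in the same Spring context (dfs.data.transfer.protection comes from the hdfs-site.xml shipped with this commit):

import org.apache.hadoop.conf.Configuration;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

@Component
public class HdfsConfigSmokeCheck implements CommandLineRunner {

    @Autowired
    private Configuration plainConfig; // the bean built by HdfsConfig

    @Override
    public void run(String... args) {
        // Should echo the configured url, e.g. hdfs://node1:8020
        System.out.println("fs.defaultFS = " + plainConfig.get("fs.defaultFS"));
        // Any value here proves hdfs-site.xml was picked up ("integrity" in this commit)
        System.out.println("dfs.data.transfer.protection = "
                + plainConfig.get("dfs.data.transfer.protection"));
    }
}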

+ 11 - 6
dmp-comp/dmp-file-starter/src/main/java/com/persagy/dmp/storage/config/MinioConfig.java

@@ -1,7 +1,11 @@
 package com.persagy.dmp.storage.config;
 
+import com.persagy.dmp.storage.service.IFileStorageService;
+import com.persagy.dmp.storage.service.impl.MinioStorageServiceImpl;
 import io.minio.MinioClient;
 import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
@@ -24,12 +28,13 @@ public class MinioConfig {
     private String secretKey;
 
     @Bean
+    @ConditionalOnProperty(value = "persagy.common.file.storage", havingValue = "0")
     public MinioClient minioClient() {
-//        try {
-//            return new MinioClient(url, accessKey, secretKey);
-            return MinioClient.builder().endpoint(url).credentials(accessKey, secretKey).build();
-//        } catch (MinioException e) {
-//            throw new BusinessException(e.getMessage(), e);
-//        }
+        return MinioClient.builder().endpoint(url).credentials(accessKey, secretKey).build();
+    }
+    @Bean
+    @ConditionalOnProperty(value = "persagy.common.file.storage", havingValue = "0")
+    public IFileStorageService minioStorageServiceImpl() {
+        return new MinioStorageServiceImpl(minioClient());
     }
 }

+ 3 - 1
dmp-comp/dmp-file-starter/src/main/java/com/persagy/dmp/storage/constant/FileStorageEnum.java

@@ -12,7 +12,9 @@ public enum FileStorageEnum {
     /** MinIO */
     MINIO("0", "minioStorageServiceImpl"),
     /** FastDFS */
-    FAST_DFS("1", "fastDfsStorageServiceImpl");
+    FAST_DFS("1", "fastDfsStorageServiceImpl"),
+    /** HDFS */
+    H_DFS("2", "hdfsStorageServiceImpl");
 
     private String index;
     private String name;
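FileStorageFactory itself is not part of this diff; a plausible sketch of the lookup it needs to perform - resolve the Spring bean whose name the enum associates with the configured persagy.common.file.storage index. The getIndex()/getName() accessors and the static context holder are assumptions, not code from this repository:

import com.persagy.dmp.storage.constant.FileStorageEnum;
import com.persagy.dmp.storage.service.IFileStorageService;
import org.springframework.context.ApplicationContext;

public class FileStorageFactorySketch {

    private static ApplicationContext context; // would be set once at startup

    /** Resolves the storage bean for an index, e.g. "2" -> hdfsStorageServiceImpl. */
    public static IFileStorageService getService(String storageIndex) {
        for (FileStorageEnum type : FileStorageEnum.values()) {
            if (type.getIndex().equals(storageIndex)) {
                return context.getBean(type.getName(), IFileStorageService.class);
            }
        }
        throw new IllegalArgumentException("Unsupported storage type: " + storageIndex);
    }
}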

+ 193 - 0
dmp-comp/dmp-file-starter/src/main/java/com/persagy/dmp/storage/service/impl/HdfsStorageServiceImpl.java

@@ -0,0 +1,193 @@
+package com.persagy.dmp.storage.service.impl;
+
+import cn.hutool.core.io.IoUtil;
+import cn.hutool.core.util.StrUtil;
+import com.persagy.dmp.common.constant.ResponseCode;
+import com.persagy.dmp.common.exception.BusinessException;
+import com.persagy.dmp.storage.service.IFileStorageService;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * HDFS storage service implementation
+ * @author Charlie Yu
+ * @date 2021-05-15
+ */
+// @Service
+@Slf4j
+@RequiredArgsConstructor
+public class HdfsStorageServiceImpl implements IFileStorageService {
+
+    private final Configuration configuration;
+    /** Default base path for file keys */
+    private final static String BASE_FILE_PATH="/test/files";
+    /** Number of possible hash buckets for a file key */
+    public static final int HASH_FILE_FOLDER_NUM = 1024;
+    /**
+     * Uploads a file.
+     * @param bucketName bucket name
+     * @param fileName file name - an existing file with the same name is rejected
+     * @param inputStream input stream
+     */
+    @Override
+    public void upload(String bucketName, String fileName, InputStream inputStream) {
+        // Guard against bad input
+        if(StrUtil.isBlank(fileName) || inputStream == null) {
+            throw new BusinessException("Invalid upload parameters!");
+        }
+        try (FileSystem fs = FileSystem.get(configuration)) {
+            Path filePath = getFilePath(bucketName,fileName);
+            if (fs.exists(filePath)) {
+                throw new BusinessException(ResponseCode.B0300.getCode(),"File already exists");
+            }
+            try (FSDataOutputStream outputStream = fs.create(filePath)) {
+                IoUtil.copy(inputStream, outputStream);
+            }
+        } catch (IOException e) {
+            MinioExceptionHandler.handleException(e);
+        }
+    }
+
+    /**
+     * Downloads a file.
+     * @param bucketName bucket name
+     * @param fileName file name
+     * @return input stream positioned at the start of the file, or null on error
+     */
+    @Override
+    public InputStream download(String bucketName, String fileName) {
+        // Guard against bad input
+        if(StrUtil.isBlank(fileName)) {
+            throw new BusinessException("Invalid download parameters!");
+        }
+        try {
+            // The FileSystem must stay open here: closing it (as the other methods
+            // do via try-with-resources) would also close the stream returned to the caller.
+            FileSystem fs = FileSystem.get(configuration);
+            Path filePath = getFilePath(bucketName,fileName);
+            if (!fs.exists(filePath)) {
+                throw new BusinessException(ResponseCode.B0300.getCode(),"File does not exist");
+            }
+            return fs.open(filePath);
+        } catch (IOException e) {
+            MinioExceptionHandler.handleException(e);
+        }
+        return null;
+    }
+
+    @Override
+    public String fetchUrl(String bucketName, String fileName) {
+        // Guard against bad input
+        if(StrUtil.isBlank(fileName)) {
+            throw new BusinessException("Invalid file parameters!");
+        }
+        // TODO
+        return null;
+    }
+    /**
+     * Checks whether a file exists.
+     * @param bucketName bucket name
+     * @param fileName file name
+     * @return true if the file exists
+     */
+    @Override
+    public boolean exists(String bucketName, String fileName) {
+        // Guard against bad input
+        if(StrUtil.isBlank(fileName)) {
+            throw new BusinessException("Invalid exists-check parameters!");
+        }
+        try (FileSystem fs = FileSystem.get(configuration)) {
+            Path filePath = getFilePath(bucketName,fileName);
+            return fs.exists(filePath);
+        } catch (IOException e) {
+            MinioExceptionHandler.handleException(e);
+        }
+        return false;
+    }
+    /**
+     * Deletes a single file.
+     * @param bucketName bucket name
+     * @param fileName file name
+     */
+    @Override
+    public void delete(String bucketName, String fileName) {
+        // Guard against bad input
+        if(StrUtil.isBlank(fileName)) {
+            throw new BusinessException("Invalid delete parameters!");
+        }
+        try (FileSystem fs = FileSystem.get(configuration)) {
+            Path filePath = getFilePath(bucketName,fileName);
+            // Only delete existing regular files
+            if (!fs.exists(filePath) || !fs.isFile(filePath)){
+                return;
+            }
+            // Non-recursive delete
+            fs.delete(filePath,false);
+        } catch (IOException e) {
+            MinioExceptionHandler.handleException(e);
+        }
+    }
+    /**
+     * Deletes a whole bucket (sub-path).
+     * @param bucketName bucket name
+     */
+    @Override
+    public void deletePath(String bucketName) {
+        // Guard: a blank bucket name must never be deleted
+        if(StrUtil.isBlank(bucketName)) {
+            throw new BusinessException("Invalid delete parameters!");
+        }
+        try (FileSystem fs = FileSystem.get(configuration)) {
+            Path filePath = getFilePath(bucketName,null);
+            if (!fs.exists(filePath)){
+                return;
+            }
+            // Recursive delete
+            fs.delete(filePath,true);
+        } catch (IOException e) {
+            MinioExceptionHandler.handleException(e);
+        }
+    }
+    /***
+     * Description: builds the file Path object
+     * @param bucketName : sub-path name
+     * @param fileName : file name
+     * @return : org.apache.hadoop.fs.Path
+     * @author : lijie
+     * @date :2021/9/9 0:11
+     * Update By lijie 2021/9/9 0:11
+     */
+    private Path getFilePath(String bucketName, String fileName) {
+        if (StrUtil.isNotBlank(fileName)){
+            return new Path(String.join("/", BASE_FILE_PATH, bucketName, getHashFilePath(fileName), fileName));
+        }
+        return new Path(String.join("/", BASE_FILE_PATH, bucketName));
+    }
+
+    /**
+     * Maps a file key to its hash bucket folder name.
+     * @param key file key
+     * @return a 4-digit string in the range 0000-1023
+     */
+    private String getHashFilePath(String key) {
+        key = key == null ? "" : key;
+        // Non-negative hash of the key
+        int hashInt = Math.abs(key.hashCode());
+        // Fixed 4-character bucket name: last four digits of 1000000 + (hash % 1024)
+        hashInt = 1000000 + hashInt % HASH_FILE_FOLDER_NUM;
+        return ("" + hashInt).substring(3);
+    }
+}
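Worked example of the resulting layout: for bucket "ADM" and file name "a", "a".hashCode() is 97, so 1000000 + 97 % 1024 = 1000097 and substring(3) yields "0097", giving /test/files/ADM/0097/a. A self-contained check of the bucketing arithmetic:

public class HashPathExample {
    public static void main(String[] args) {
        String key = "a";                            // "a".hashCode() == 97
        int hashInt = Math.abs(key.hashCode());
        hashInt = 1000000 + hashInt % 1024;          // -> 1000097
        String folder = ("" + hashInt).substring(3); // -> "0097"
        System.out.println("/test/files/ADM/" + folder + "/" + key);
    }
}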

+ 5 - 3
dmp-comp/dmp-file-starter/src/main/java/com/persagy/dmp/storage/service/impl/MinioStorageServiceImpl.java

@@ -5,7 +5,9 @@ import com.persagy.dmp.common.exception.BusinessException;
 import com.persagy.dmp.storage.constant.FileCommonConst;
 import com.persagy.dmp.storage.service.IFileStorageService;
 import io.minio.*;
+import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.stereotype.Service;
 
 import javax.annotation.Resource;
@@ -16,12 +18,12 @@ import java.io.InputStream;
  * @author Charlie Yu
  * @date 2021-05-15
  */
-@Service
+// @Service
 @Slf4j
+@RequiredArgsConstructor
 public class MinioStorageServiceImpl implements IFileStorageService {
 
-    @Resource
-    private MinioClient minioClient;
+    private final MinioClient minioClient;
 
     @Override
     public void upload(String bucketName, String fileName, InputStream input) {

+ 9 - 0
dmp-server/src/main/java/com/persagy/aspects/RequestLogAspect.java

@@ -47,6 +47,9 @@ public class RequestLogAspect {
 			return;
 		}
 		HttpServletRequest request = attributes.getRequest();
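+		// Only JSON requests are logged; multipart uploads and other content types are skipped.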
+		if (!"application/json".equals(request.getContentType())){
+			return;
+		}
 		LogData data = initFlag(request);
 		dataStorage.set(data);
 		log.info("{} REQUEST URL      : {} {} {} {}", data.intFlag, data.method, data.url, data.remoteIp, data.remotePort);
@@ -77,6 +80,9 @@ public class RequestLogAspect {
 
 	@AfterReturning(returning = "ret", pointcut = "controllerPoint()")
 	public void doAfterReturning(Object ret) {
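+		// dataStorage is only populated for JSON requests (see doBefore), so it may be empty here.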
+		if (null==dataStorage.get()){
+			return;
+		}
 		LogData data = dataStorage.get();
 		String result = null;
 		if (ret != null) {
@@ -92,6 +98,9 @@ public class RequestLogAspect {
 
 	@AfterThrowing(throwing = "ex", pointcut = "controllerPoint()")
 	public void doAfterThrowing(Throwable ex) {
+		if (null==dataStorage.get()){
+			return;
+		}
 		LogData data = dataStorage.get();
 		log.error("{} REQUEST EXCEPTION: {} {} {}", data.intFlag, System.currentTimeMillis() - data.timestamp, data.url, ex);
 		dataStorage.remove();