Browse Source

Pull request #405: REPORT-100817 && REPORT-102146 fix: s3插件对于分片上传的支持;isDirectory方法某些场景判断不正确

Merge in PG/plugin-repository-s3 from ~AFLY/plugin-repository-s3:release/10.0 to release/10.0

* commit 'ee7b156bde0eb714538029130a72602d2fb5d8ea':
  无jira任务 fix: 提升插件版本
  JSY-28624 fix: 优化下分片上传逻辑
  REPORT-102146 fix: isDirectory方法某些场景判断不正确
  REPORT-100817 fix: s3插件对于分片上传的支持
release/10.0
Afly-储泓飞 2 years ago
parent
commit
688f08bd4d
  1. 3
      plugin.xml
  2. 107
      src/main/java/com/fanruan/fs/s3/repository/core/S3ResourceRepository.java

3
plugin.xml

@@ -5,12 +5,13 @@
<main-package>com.fanruan.fs</main-package> <main-package>com.fanruan.fs</main-package>
<active>yes</active> <active>yes</active>
<hidden>no</hidden> <hidden>no</hidden>
<version>1.3.9</version> <version>1.4.0</version>
<env-version>10.0~10.0</env-version> <env-version>10.0~10.0</env-version>
<jartime>2023-03-14</jartime> <jartime>2023-03-14</jartime>
<vendor>richie</vendor> <vendor>richie</vendor>
<description><![CDATA[使用支持S3协议的云存储文件系统作为文件服务器。]]></description> <description><![CDATA[使用支持S3协议的云存储文件系统作为文件服务器。]]></description>
<change-notes><![CDATA[ <change-notes><![CDATA[
[2023-08-08]支持分片上传,模板保存问题修复。 <br/>
[2023-06-30]修复默认配置获取错误的问题,过滤有问题的路径。 <br/> [2023-06-30]修复默认配置获取错误的问题,过滤有问题的路径。 <br/>
[2023-03-28]第三方组件升级。 <br/> [2023-03-28]第三方组件升级。 <br/>
[2023-01-03]优化写文件性能; 修复文件太多显示不全的问题。<br/> [2023-01-03]优化写文件性能; 修复文件太多显示不全的问题。<br/>

107
src/main/java/com/fanruan/fs/s3/repository/core/S3ResourceRepository.java

@@ -7,25 +7,33 @@ import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest;
import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadResult;
import com.amazonaws.services.s3.model.ListObjectsRequest; import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.ObjectMetadata; import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PartETag;
import com.amazonaws.services.s3.model.PutObjectRequest; import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectSummary; import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.amazonaws.services.s3.model.UploadPartRequest;
import com.amazonaws.services.s3.model.UploadPartResult;
import com.amazonaws.util.IOUtils; import com.amazonaws.util.IOUtils;
import com.fanruan.api.log.LogKit; import com.fanruan.api.log.LogKit;
import com.fanruan.api.util.StringKit; import com.fanruan.api.util.StringKit;
import com.fr.io.repository.FineFileEntry; import com.fr.io.repository.FineFileEntry;
import com.fr.io.repository.base.BaseResourceRepository; import com.fr.io.repository.base.BaseResourceRepository;
import com.fr.io.utils.ResourceIOUtils;
import com.fr.stable.Filter; import com.fr.stable.Filter;
import com.fr.stable.StringUtils; import com.fr.stable.StringUtils;
import com.fr.third.org.apache.commons.io.output.NullOutputStream; import com.fr.third.org.apache.commons.io.output.NullOutputStream;
import com.fr.workspace.resource.ResourceIOException; import com.fr.workspace.resource.ResourceIOException;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.net.URL; import java.net.URL;
import java.net.URLConnection; import java.net.URLConnection;
@@ -41,6 +49,10 @@ public class S3ResourceRepository extends BaseResourceRepository {
private static final int PAGE_SIZE = 1000; private static final int PAGE_SIZE = 1000;
private static final int PART_SIZE = 5 * 1024 * 1024;
private static final int MULTIPART_UPLOAD_LIMIT = 20 * PART_SIZE;
private static final String DELIMITER = "/"; private static final String DELIMITER = "/";
public static final String HTTP = "http:"; public static final String HTTP = "http:";
@@ -67,7 +79,6 @@ public class S3ResourceRepository extends BaseResourceRepository {
if (config.getEndPoint().startsWith(HTTP)) { if (config.getEndPoint().startsWith(HTTP)) {
clientConfiguration.setProtocol(Protocol.HTTP); clientConfiguration.setProtocol(Protocol.HTTP);
} }
clientConfiguration.setProtocol(Protocol.HTTP);
amazonS3ClientBuilder = amazonS3ClientBuilder.withClientConfiguration(clientConfiguration); amazonS3ClientBuilder = amazonS3ClientBuilder.withClientConfiguration(clientConfiguration);
this.s3 = amazonS3ClientBuilder.build(); this.s3 = amazonS3ClientBuilder.build();
this.bucket = config.getBucket(); this.bucket = config.getBucket();
@@ -142,22 +153,71 @@ public class S3ResourceRepository extends BaseResourceRepository {
@Override @Override
public void write(String path, byte[] data) { public void write(String path, byte[] data) {
ObjectMetadata metadata; int length = data.length;
try { if (length > MULTIPART_UPLOAD_LIMIT) {
metadata = s3.getObjectMetadata(bucket, path); multipartUpload(path, new ByteArrayInputStream(data));
} catch (Exception e) { } else {
metadata = new ObjectMetadata(); ObjectMetadata metadata;
String mimeType = URLConnection.guessContentTypeFromName(path); try {
if (mimeType != null) { metadata = s3.getObjectMetadata(bucket, path);
metadata.setContentType(mimeType); } catch (Exception e) {
metadata = new ObjectMetadata();
String mimeType = URLConnection.guessContentTypeFromName(path);
if (mimeType != null) {
metadata.setContentType(mimeType);
}
} }
if (metadata != null) {
metadata.setContentLength(length);
}
s3.putObject(bucket, path, new ByteArrayInputStream(data), metadata);
} }
if (metadata != null) {
metadata.setContentLength(data.length);
}
s3.putObject(bucket, path, new ByteArrayInputStream(data), metadata);
} }
/**
 * Writes the content of {@code inputStream} to {@code path} via S3 multipart upload.
 * The stream is always closed before this method returns.
 *
 * @param path        object key to write
 * @param inputStream content to store; closed on return
 * @throws ResourceIOException if reading the stream fails
 */
@Override
public void write(String path, InputStream inputStream) throws ResourceIOException {
    multipartUpload(path, inputStream);
}

/**
 * Uploads a stream as an S3 multipart upload in {@code PART_SIZE} chunks.
 * <p>
 * Two fixes over the naive loop: (1) every part except the last is filled to exactly
 * {@code PART_SIZE} bytes — a single {@code InputStream.read} may legally return fewer
 * bytes than requested, and S3 rejects non-final parts smaller than 5 MB with
 * {@code EntityTooSmall}; (2) a failed upload is aborted so incomplete parts do not
 * keep accruing storage charges on the server.
 *
 * @param path        destination object key
 * @param inputStream source data; always closed before returning
 */
private void multipartUpload(String path, InputStream inputStream) {
    String uploadId = null;
    try {
        // Step 1: start the multipart upload and keep its id for complete/abort.
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucket, path);
        uploadId = s3.initiateMultipartUpload(initRequest).getUploadId();

        // Step 2: upload fully-filled PART_SIZE chunks; part numbers start at 1 per the S3 API.
        // NOTE(review): an empty stream yields zero parts and completeMultipartUpload will
        // reject that, matching the previous behavior — confirm callers never pass empty streams.
        List<PartETag> partETags = new ArrayList<>();
        byte[] buffer = new byte[PART_SIZE];
        int partNumber = 1;
        int filled;
        while ((filled = readFully(inputStream, buffer)) > 0) {
            UploadPartRequest uploadRequest = new UploadPartRequest()
                    .withBucketName(bucket)
                    .withKey(path)
                    .withUploadId(uploadId)
                    .withPartNumber(partNumber)
                    .withInputStream(new ByteArrayInputStream(buffer, 0, filled))
                    .withPartSize(filled);
            partETags.add(s3.uploadPart(uploadRequest).getPartETag());
            partNumber++;
        }

        // Step 3: stitch the uploaded parts into the final object.
        s3.completeMultipartUpload(new CompleteMultipartUploadRequest(bucket, path, uploadId, partETags));
        uploadId = null; // success — nothing left to abort
    } catch (IOException e) {
        throw new ResourceIOException(e);
    } finally {
        if (uploadId != null) {
            // The upload did not complete: abort so S3 discards the stored parts.
            try {
                s3.abortMultipartUpload(
                        new com.amazonaws.services.s3.model.AbortMultipartUploadRequest(bucket, path, uploadId));
            } catch (Exception ignored) {
                // best-effort cleanup; the original failure is what matters
            }
        }
        ResourceIOUtils.close(inputStream);
    }
}

/**
 * Reads from {@code in} until {@code buffer} is full or EOF is reached.
 *
 * @return the number of bytes placed into {@code buffer}; 0 only at EOF
 * @throws IOException if the underlying read fails
 */
private static int readFully(InputStream in, byte[] buffer) throws IOException {
    int offset = 0;
    while (offset < buffer.length) {
        int read = in.read(buffer, offset, buffer.length - offset);
        if (read == -1) {
            break;
        }
        offset += read;
    }
    return offset;
}
@Override @Override
public boolean createFile(String path) { public boolean createFile(String path) {
@@ -331,19 +391,16 @@ public class S3ResourceRepository extends BaseResourceRepository {
@Override @Override
public boolean isDirectory(String path) { public boolean isDirectory(String path) {
if (path.endsWith(DELIMITER)) {
if (path.endsWith(DELIMITER) && exist(path)) { return exist(path);
return true;
}
ObjectListing listing = s3.listObjects(bucket, path);
if (listing.getObjectSummaries().isEmpty()) {
return false;
}
if (listing.getObjectSummaries().size() > 1) {
return true;
} else { } else {
S3ObjectSummary summary = listing.getObjectSummaries().get(0); ObjectListing listing = s3.listObjects(bucket, path);
return !StringKit.equals(listing.getPrefix(), summary.getKey()); List<S3ObjectSummary> objectSummaries = listing.getObjectSummaries();
if (objectSummaries.isEmpty()) {
return false;
}
String dirFormat = path + DELIMITER;
return objectSummaries.stream().anyMatch(s3ObjectSummary -> StringUtils.equals(s3ObjectSummary.getKey(), dirFormat));
} }
} }

Loading…
Cancel
Save