Skip to content

Commit

Permalink
feat: support for uploading folders in this example
Browse files Browse the repository at this point in the history
  • Loading branch information
dayuy committed Dec 20, 2023
1 parent fc1b832 commit ac59b2e
Show file tree
Hide file tree
Showing 2 changed files with 70 additions and 7 deletions.
39 changes: 38 additions & 1 deletion apiserver/examples/upload-download-file/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,44 @@ I1211 15:16:22.198117 2903929 main.go:111] [DEBUG] send get request to http://lo
I1211 15:16:22.200152 2903929 main.go:339] [Done], the file already exists and does not need to be uploaded again
```
2. Download the file you just uploaded
2. Upload a local folder, e.g. /Users/local/abc
```shell
./main --file=/Users/local/abc

I1220 14:44:57.237730 12365 main.go:288] [DEBUG] ***** part 0, md5 is 33468e040ef4add042ce484a4753a075
I1220 14:44:57.238949 12365 main.go:336] [DEBUG] file md5 33468e040ef4add042ce484a4753a075, etag: 78b9699361a66b9b2a819dfc6b667bee-1...
I1220 14:44:57.238962 12365 main.go:348] [Step 1] check the number of chunks the file has completed.
I1220 14:44:57.238969 12365 main.go:107] [DEBUG] check success chunks...
I1220 14:44:57.239000 12365 main.go:115] [DEBUG] send get request to http://localhost:8099/bff/versioneddataset/files/chunks?bucket=abc&bucketPath=dataset%2Fds1%2Fv1&etag=78b9699361a66b9b2a819dfc6b667bee-1&fileName=Users%2Flocal%2Fabc%2Ftest.json&md5=33468e040ef4add042ce484a4753a075
I1220 14:44:58.488674 12365 main.go:365] [Step 2] get new uploadid
I1220 14:44:58.488746 12365 main.go:148] [DEBUG] request new multipart uploadid...
I1220 14:44:58.488886 12365 main.go:161] [DEBUG] send post request to http://localhost:8099/bff/versioneddataset/files/chunks, with body {"chunkCount":1,"size":33554432,"md5":"33468e040ef4add042ce484a4753a075","fileName":"Users/local/abc/test.json","bucket":"abc","bucketPath":"dataset/ds1/v1"}...
I1220 14:44:58.586926 12365 main.go:376] [Step 3] tart uploading files based on uploadid ZWY3NzgyYWUtMjJiYi00NGEwLTkwZWEtYmY0NjE1NzlmZjMwLjM5ZjBkNWQwLWQxZGEtNGNjYy1iYTE2LTg5NDI5ODk5NTdjZA.
I1220 14:44:58.587419 12365 main.go:198] [DEBUG] request upload url by uploadid: ZWY3NzgyYWUtMjJiYi00NGEwLTkwZWEtYmY0NjE1NzlmZjMwLjM5ZjBkNWQwLWQxZGEtNGNjYy1iYTE2LTg5NDI5ODk5NTdjZA...
I1220 14:44:58.588190 12365 main.go:210] [DEBUG] send post request to http://localhost:8099/bff/versioneddataset/files/chunk_url, with body {"partNumber":1,"size":33554432,"md5":"33468e040ef4add042ce484a4753a075","uploadID":"ZWY3NzgyYWUtMjJiYi00NGEwLTkwZWEtYmY0NjE1NzlmZjMwLjM5ZjBkNWQwLWQxZGEtNGNjYy1iYTE2LTg5NDI5ODk5NTdjZA","bucket":"abc","bucketPath":"dataset/ds1/v1"}...
I1220 14:44:58.778700 12365 main.go:407] [Step 4], all chunks are uploaded successfully and merging of chunks begins.
I1220 14:44:58.778818 12365 main.go:241] [DEBUG] all chunks are uploaded, merge all chunks...
I1220 14:44:58.778930 12365 main.go:252] [DEBUG] send put request to http://localhost:8099/bff/versioneddataset/files/chunks, with body {"md5":"33468e040ef4add042ce484a4753a075","bucketPath":"dataset/ds1/v1","bucket":"abc","fileName":"Users/local/abc/test.json","uploadID":"ZWY3NzgyYWUtMjJiYi00NGEwLTkwZWEtYmY0NjE1NzlmZjMwLjM5ZjBkNWQwLWQxZGEtNGNjYy1iYTE2LTg5NDI5ODk5NTdjZA"}...
I1220 14:44:58.975929 12365 main.go:424] [Step 5], Congratulations, the file was uploaded successfully
I1220 14:44:58.979850 12365 main.go:288] [DEBUG] ***** part 0, md5 is 9e55ed2119037aa78ed8a705d54f7a07
I1220 14:44:58.979949 12365 main.go:336] [DEBUG] file md5 9e55ed2119037aa78ed8a705d54f7a07, etag: 2b57ad79766a5af824c17168e99f69a4-1...
I1220 14:44:58.980007 12365 main.go:348] [Step 1] check the number of chunks the file has completed.
I1220 14:44:58.980017 12365 main.go:107] [DEBUG] check success chunks...
I1220 14:44:58.980075 12365 main.go:115] [DEBUG] send get request to http://localhost:8099/bff/versioneddataset/files/chunks?bucket=abc&bucketPath=dataset%2Fds1%2Fv1&etag=2b57ad79766a5af824c17168e99f69a4-1&fileName=Users%2Flocal%2Fabc%2Ftesta.json&md5=9e55ed2119037aa78ed8a705d54f7a07
I1220 14:44:59.021077 12365 main.go:365] [Step 2] get new uploadid
I1220 14:44:59.021100 12365 main.go:148] [DEBUG] request new multipart uploadid...
I1220 14:44:59.021128 12365 main.go:161] [DEBUG] send post request to http://localhost:8099/bff/versioneddataset/files/chunks, with body {"chunkCount":1,"size":33554432,"md5":"9e55ed2119037aa78ed8a705d54f7a07","fileName":"Users/local/abc/testa.json","bucket":"abc","bucketPath":"dataset/ds1/v1"}...
I1220 14:44:59.045750 12365 main.go:376] [Step 3] tart uploading files based on uploadid ZWY3NzgyYWUtMjJiYi00NGEwLTkwZWEtYmY0NjE1NzlmZjMwLmYzYTZjMzE5LTU4OTUtNDJjNy05ZDg0LTNlNGY1Y2VjNDY5OQ.
I1220 14:44:59.045804 12365 main.go:198] [DEBUG] request upload url by uploadid: ZWY3NzgyYWUtMjJiYi00NGEwLTkwZWEtYmY0NjE1NzlmZjMwLmYzYTZjMzE5LTU4OTUtNDJjNy05ZDg0LTNlNGY1Y2VjNDY5OQ...
I1220 14:44:59.045817 12365 main.go:210] [DEBUG] send post request to http://localhost:8099/bff/versioneddataset/files/chunk_url, with body {"partNumber":1,"size":33554432,"md5":"9e55ed2119037aa78ed8a705d54f7a07","uploadID":"ZWY3NzgyYWUtMjJiYi00NGEwLTkwZWEtYmY0NjE1NzlmZjMwLmYzYTZjMzE5LTU4OTUtNDJjNy05ZDg0LTNlNGY1Y2VjNDY5OQ","bucket":"abc","bucketPath":"dataset/ds1/v1"}...
I1220 14:44:59.203346 12365 main.go:407] [Step 4], all chunks are uploaded successfully and merging of chunks begins.
I1220 14:44:59.203440 12365 main.go:241] [DEBUG] all chunks are uploaded, merge all chunks...
I1220 14:44:59.203494 12365 main.go:252] [DEBUG] send put request to http://localhost:8099/bff/versioneddataset/files/chunks, with body {"md5":"9e55ed2119037aa78ed8a705d54f7a07","bucketPath":"dataset/ds1/v1","bucket":"abc","fileName":"Users/local/abc/testa.json","uploadID":"ZWY3NzgyYWUtMjJiYi00NGEwLTkwZWEtYmY0NjE1NzlmZjMwLmYzYTZjMzE5LTU4OTUtNDJjNy05ZDg0LTNlNGY1Y2VjNDY5OQ"}...
I1220 14:44:59.444794 12365 main.go:424] [Step 5], Congratulations, the file was uploaded successfully
```
3. Download the file you just uploaded
```shell
./main --action=download --file=tmp.tar.gz --bucket-path=def --bucket=abc
Expand Down
38 changes: 32 additions & 6 deletions apiserver/examples/upload-download-file/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,12 @@ import (
"flag"
"fmt"
"io"
"io/fs"
"net/http"
"net/url"
"os"
"path/filepath"
"strings"
"sync"

"k8s.io/client-go/util/retry"
Expand Down Expand Up @@ -228,11 +231,11 @@ func genURL(
klog.Errorf("[Error] send genMultipartURL request error %s", err)
return GenChunkURLResult{}, err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
klog.Infof("[Error] status code is %s, debug resp information %+v\n", resp.StatusCode, *resp)
return GenChunkURLResult{}, fmt.Errorf("response code is %d", resp.StatusCode)
}
defer resp.Body.Close()
data, err := io.ReadAll(resp.Body)
if err != nil {
klog.Errorf("[Error] failed to read response body error %s", err)
Expand Down Expand Up @@ -333,6 +336,7 @@ func do(

func uploadFile(filePath, bucket, bucketPath string, tp http.RoundTripper) {
f, err := os.Open(filePath)
defer f.Close()
if err != nil {
panic(err)
}
Expand All @@ -347,11 +351,15 @@ func uploadFile(filePath, bucket, bucketPath string, tp http.RoundTripper) {
klog.Errorf("[Error] can't stat file size...")
return
}
fileName := filePath
if filepath.IsAbs(fileName) {
fileName = strings.TrimPrefix(fileName, "/")
}
step := 1

klog.Infof("[Step %d] check the number of chunks the file has completed.", step)
step++
completedChunks, err := successChunks(md5, bucket, bucketPath, filePath, etag, tp)
completedChunks, err := successChunks(md5, bucket, bucketPath, fileName, etag, tp)
if err != nil {
klog.Errorf("[!!!Error] failed to check completed chunks. error %s", err)
return
Expand All @@ -369,7 +377,7 @@ func uploadFile(filePath, bucket, bucketPath string, tp http.RoundTripper) {
klog.Infof("[Step %d] get new uploadid", step)
step++

uploadID, err := newMultipart(md5, bucket, bucketPath, filePath, bufSize, int(chunkCount), tp)
uploadID, err := newMultipart(md5, bucket, bucketPath, fileName, bufSize, int(chunkCount), tp)
if err != nil {
klog.Errorf("[!!!Error] failed to get new uplaodid. error %s", err)
return
Expand Down Expand Up @@ -397,7 +405,7 @@ func uploadFile(filePath, bucket, bucketPath string, tp http.RoundTripper) {

reader := io.NewSectionReader(f, int64(pn-1)*bufSize, bufSize)
go func(partNumber int, reader io.Reader) {
if err := do(&wg, reader, partNumber, bufSize, md5, completedChunks.UploadID, bucket, bucketPath, filePath, tp); err != nil {
if err := do(&wg, reader, partNumber, bufSize, md5, completedChunks.UploadID, bucket, bucketPath, fileName, tp); err != nil {
klog.Errorf("!!![Error] Uploading the %d(st,ne,rd,th) chunk of the file, an error occurs, but the operation will not affect the other chunks at this time, so only the error will be logged here.", partNumber)
lock <- struct{}{}
doComplete = false
Expand All @@ -414,7 +422,7 @@ func uploadFile(filePath, bucket, bucketPath string, tp http.RoundTripper) {
if err := retry.OnError(retry.DefaultRetry, func(err error) bool {
return true
}, func() error {
if err := complete(md5, bucket, bucketPath, completedChunks.UploadID, filePath, tp); err != nil {
if err := complete(md5, bucket, bucketPath, completedChunks.UploadID, fileName, tp); err != nil {
klog.Errorf("[!!!RetryError] retry %d, error %v", retryTimes, err)
retryTimes++
}
Expand Down Expand Up @@ -537,7 +545,25 @@ func main() {
tp := &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}}

if *action == upload {
uploadFile(*fileName, *bucket, *bucketPath, tp)
info, err := os.Stat(*fileName)
if err != nil {
klog.Errorf("[Error] can't stat file %s: %s", *fileName, err)
return
}
if info.IsDir() {
filepath.WalkDir(*fileName, func(path string, d fs.DirEntry, err error) error {
if err != nil {
klog.Errorf("[Error] failed access a path %s: %s", path, err)
return err
}
if !d.IsDir() {
uploadFile(path, *bucket, *bucketPath, tp)
}
return nil
})
} else {
uploadFile(*fileName, *bucket, *bucketPath, tp)
}
} else {
downloadFile(*bucket, *bucketPath, *fileName, tp)
}
Expand Down

0 comments on commit ac59b2e

Please sign in to comment.