英文:
Read a file, compress it and pipe the compressed output into S3
问题
我正在尝试编写一个工具,它将压缩一个目录并将压缩后的输出流传输到S3,而不先将其缓存到磁盘上。
package main
import (
"compress/gzip"
"io"
"log"
"os"
"sync"
"github.com/rlmcpherson/s3gof3r"
)
// log.Fatal() implies os.Exit(1)
func logerror(err error) {
if err != nil {
log.Fatalf("%s\n", err)
}
}
func main() {
k, err := s3gof3r.EnvKeys()
logerror(err)
// 打开要写入文件的存储桶
s3 := s3gof3r.New("", k)
bucket := s3.Bucket("somebucket")
// 打开要上传的文件
files, err := os.Open("somefile")
logerror(err)
defer files.Close()
// 打开PutWriter以进行S3上传
s3writer, err := bucket.PutWriter("somezipfile.gz", nil, nil)
logerror(err)
// 创建用于将gzip输出传递给putwriter输入的io管道
pipereader, pipewriter := io.Pipe()
defer pipereader.Close()
var wg sync.WaitGroup
wg.Add(2)
// 压缩
go func() {
defer wg.Done()
defer pipewriter.Close()
gw := gzip.NewWriter(pipewriter)
defer gw.Close()
_, err := io.Copy(gw, files)
logerror(err)
}()
// 传输
go func() {
defer wg.Done()
_, err := io.Copy(s3writer, pipereader)
logerror(err)
}()
wg.Wait()
}
当我编译和运行这段代码时,没有错误输出,也没有在S3中找到文件。如果有帮助的话,添加一些打印输出会得到以下结果:
files: &{0xc4200d0a00}
s3writer: &{{https <nil> somebucket.s3.amazonaws.com /somezipfile.gz false } 0xc4200d0a60 0xc420014540 20971520 [] 0 0xc42010e2a0 0 false <nil> {{} [0 0 0 0 0 0 0 0 0 0 0 0] 0} 0xc42010e300 0xc42010e360 0xc42035a740 0 97wUYO2YZPjLXqOLTma_Y1ASo.0IdeoKkif6pch60s3._J1suo9pUTCFwUj23uT.puzzDEHcV1KJPze.1EnLeoNehhBXeSpsH_.e4gXlNqBZ0HFsvyABJfHNYwUyXASx { []} 0}
pipewriter: &{0xc42013c180}
gzipwriter: &{{ [] 0001-01-01 00:00:00 +0000 UTC 255} 0xc420116020 -1 false <nil> 0 0 false [0 0 0 0 0 0 0 0 0 0] <nil>}
archive: 1283
upload: 606
希望能帮到你!
英文:
I'm trying to write a tool which will compress a directory and stream the compressed output into S3 without caching it on disk first.
package main
import (
"compress/gzip"
"io"
"log"
"os"
"sync"
"github.com/rlmcpherson/s3gof3r"
)
// log.Fatal() implies os.Exit(1)
func logerror(err error) {
if err != nil {
log.Fatalf("%s\n", err)
}
}
func main() {
k, err := s3gof3r.EnvKeys()
logerror(err)
// Open bucket we want to write a file to
s3 := s3gof3r.New("", k)
bucket := s3.Bucket("somebucket")
// Open file to upload
files, err := os.Open("somefile")
logerror(err)
defer files.Close()
// open a PutWriter for S3 upload
s3writer, err := bucket.PutWriter("somezipfile.gz", nil, nil)
logerror(err)
// Create io pipe for passing gzip output to putwriter input
pipereader, pipewriter := io.Pipe()
defer pipereader.Close()
var wg sync.WaitGroup
wg.Add(2)
// Compress
go func() {
defer wg.Done()
defer pipewriter.Close()
gw := gzip.NewWriter(pipewriter)
defer gw.Close()
_, err := io.Copy(gw, files)
logerror(err)
}()
// Transmit
go func() {
defer wg.Done()
_, err := io.Copy(s3writer, pipereader)
logerror(err)
}()
wg.Wait()
}
When I compile and run this, I get no error output and no file in S3. Adding a bunch of prints gets me the following output, if it's helpful:
files: &{0xc4200d0a00}
s3writer: &{{https <nil> somebucket.s3.amazonaws.com /somezipfile.gz false } 0xc4200d0a60 0xc420014540 20971520 [] 0 0xc42010e2a0 0 false <nil> {{} [0 0 0 0 0 0 0 0 0 0 0 0] 0} 0xc42010e300 0xc42010e360 0xc42035a740 0 97wUYO2YZPjLXqOLTma_Y1ASo.0IdeoKkif6pch60s3._J1suo9pUTCFwUj23uT.puzzDEHcV1KJPze.1EnLeoNehhBXeSpsH_.e4gXlNqBZ0HFsvyABJfHNYwUyXASx { []} 0}
pipewriter: &{0xc42013c180}
gzipwriter: &{{ [] 0001-01-01 00:00:00 +0000 UTC 255} 0xc420116020 -1 false <nil> 0 0 false [0 0 0 0 0 0 0 0 0 0] <nil>}
archive: 1283
upload: 606
Help appreciated!
答案1
得分: 1
我通过其他途径得到了一些帮助,这是工作代码:
package s3upload
import (
"compress/gzip"
"io"
"os"
"github.com/crielly/mongosnap/logger"
"github.com/rlmcpherson/s3gof3r"
)
// S3upload 将压缩的输出流传输到S3
func S3upload(toarchive, s3bucket, object string) {
keys, err := s3gof3r.EnvKeys()
logger.LogError(err)
// 打开要写入文件的存储桶
s3 := s3gof3r.New("", keys)
bucket := s3.Bucket(s3bucket)
// 打开一个PutWriter用于S3上传
s3writer, err := bucket.PutWriter(object, nil, nil)
logger.LogError(err)
defer s3writer.Close()
// 打开一个压缩写入器来处理gzip并将其传递给S3写入器
zipwriter := gzip.NewWriter(s3writer)
defer zipwriter.Close()
// 打开要归档的文件
file, err := os.Open(toarchive)
logger.LogError(err)
defer file.Close()
// 将打开的文件传递给压缩写入器
_, err = io.Copy(zipwriter, file)
logger.LogError(err)
}
希望对你有帮助!
英文:
I ended up getting some help via another avenue, the working code:|
<!-- language: golang -->
package s3upload
import (
"compress/gzip"
"io"
"os"
"github.com/crielly/mongosnap/logger"
"github.com/rlmcpherson/s3gof3r"
)
// S3upload streams compressed output to S3
func S3upload(toarchive, s3bucket, object string) {
keys, err := s3gof3r.EnvKeys()
logger.LogError(err)
// Open bucket we want to write a file to
s3 := s3gof3r.New("", keys)
bucket := s3.Bucket(s3bucket)
// open a PutWriter for S3 upload
s3writer, err := bucket.PutWriter(object, nil, nil)
logger.LogError(err)
defer s3writer.Close()
// Open a compressed writer to handle gzip and pass it to S3 writer
zipwriter := gzip.NewWriter(s3writer)
defer zipwriter.Close()
// Open files we want archived
file, err := os.Open(toarchive)
logger.LogError(err)
defer file.Close()
// Pass opened file to compression writer
_, err = io.Copy(zipwriter, file)
logger.LogError(err)
}
通过集体智慧和协作来改善编程学习和解决问题的方式。致力于成为全球开发者共同参与的知识库,让每个人都能够通过互相帮助和分享经验来进步。
评论