这是一个如何设置并发下载程序的示例。需要注意的是带宽、内存和磁盘空间。一次并发做很多事情会消耗大量带宽,内存也是如此。你下载的是相当大的文件,所以内存可能会成为一个问题。另一件需要注意的事情是,使用 goroutine 会丢失请求顺序。因此,如果返回字节的顺序很重要,那么这种方法将不起作用,因为你必须知道最终组装文件的字节顺序——这意味着一次下载一个是最好的,除非你实现一种跟踪顺序的方法(可能是某种全局的 map[order int][]byte 加互斥锁以防止竞争条件)。一个不涉及 Go 的替代方案(假设你有一台 unix 机器以方便使用)是使用 Curl:
http://osxdaily.com/2014/02/13/download-with-curl/
package main
import (
	"bytes"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"net/http"
	"sync"
	"time"
)
// now you're going to have to be careful because you can potentially run out of memory downloading too many files at once..
// however here is an example that can be modded
// downloader fetches URL and writes the response body to file<fileNum>.txt.
// It blocks on sema to bound the number of concurrent downloads and signals
// wg when finished. Errors are logged rather than fatal so that one bad URL
// does not kill every other in-flight download.
func downloader(wg *sync.WaitGroup, sema chan struct{}, fileNum int, URL string) {
	sema <- struct{}{} // acquire a semaphore slot; blocks while the limit is reached
	defer func() {
		<-sema // release the slot
		wg.Done()
	}()
	// BUG FIX: Client.Timeout is a time.Duration (nanoseconds). The original
	// `Timeout: 10` meant 10ns, which makes virtually every request fail.
	client := &http.Client{Timeout: 10 * time.Second}
	res, err := client.Get(URL)
	if err != nil {
		// log.Fatal here would exit the whole process from inside a
		// goroutine, abandoning the other downloads and skipping the
		// deferred cleanup above. Log and bail out of this one instead.
		log.Printf("downloading %s: %v", URL, err)
		return
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK {
		log.Printf("downloading %s: unexpected status %s", URL, res.Status)
		return
	}
	// Buffer the body in memory before writing it out. io.Copy straight
	// into an *os.File would use constant memory instead, at the cost of
	// possibly leaving a partial file behind on error.
	var buf bytes.Buffer
	if _, err := io.Copy(&buf, res.Body); err != nil {
		log.Printf("reading %s: %v", URL, err)
		return
	}
	if err := ioutil.WriteFile(fmt.Sprintf("file%d.txt", fileNum), buf.Bytes(), 0644); err != nil {
		log.Printf("writing file%d.txt: %v", fileNum, err)
	}
}
// main launches one download goroutine per link, capping concurrency at
// four via a buffered-channel semaphore, and waits for all of them to finish.
func main() {
	links := []string{
		"url1",
		"url2", // etc...
	}

	// A buffered channel acts as a counting semaphore: at most four
	// goroutines can hold a slot at any moment; the fifth send blocks
	// until a running download releases its slot.
	sem := make(chan struct{}, 4)

	var wg sync.WaitGroup
	wg.Add(len(links))
	for idx, url := range links {
		go downloader(&wg, sem, idx, url)
	}
	wg.Wait()
}