Skip to content

Commit

Permalink
Removed URL validation from package; added support for multiple URLs
Browse files Browse the repository at this point in the history
  • Loading branch information
ishanjain28 committed Sep 7, 2017
1 parent c6986e3 commit 9f73c73
Show file tree
Hide file tree
Showing 4 changed files with 50 additions and 21 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,4 @@

# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
.glide/
test
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ There are a lot of tools similar to and better than Pluto but most of them have an u

go get github.com/ishanjain28/pluto

2. See the [Releases](https://github.com/ishanjain28/pluto/releases) for Precompiled Binaries
2. See the [Releases](https://github.com/ishanjain28/pluto/releases) section for Precompiled Binaries

### Package Example:

Expand Down
9 changes: 2 additions & 7 deletions pluto/pluto.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,17 +28,12 @@ type FileMeta struct {
// then downloads the file by dividing it into given number of parts and downloading all parts concurrently.
// If any error occurs in the downloading stage of any part, it'll wait for 2 seconds, discard the existing part, and restart it.
// Discarding whatever bytes were already downloaded isn't exactly smart, so I'll also be implementing a feature where it can skip over what is already downloaded.
func Download(link string, parts int) (*os.File, error) {
func Download(linkp *url.URL, parts int) (*os.File, error) {

if link == "" {
if linkp == nil {
return nil, fmt.Errorf("No URL Provided")
}

linkp, err := url.Parse(link)
if err != nil {
return nil, fmt.Errorf("error in parsing url: %v", err)
}

fmeta, err := FetchMeta(linkp)
if err != nil {
return nil, fmt.Errorf("error in fetching metadata: %v", err)
Expand Down
59 changes: 46 additions & 13 deletions pluto_cli.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,10 @@ import (
"fmt"
"io"
"log"
"net/url"
"os"
"os/signal"
"path/filepath"
"syscall"
"time"

Expand All @@ -15,15 +17,6 @@ import (

func main() {

u := flag.String("url", "", "Download link of a file")

parts := flag.Int("part", 16, "Number of Download parts")

flag.Parse()
if *u == "" {
log.Fatalln("no url provided")
}

sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt, syscall.SIGINT, syscall.SIGTERM)

Expand All @@ -33,24 +26,64 @@ func main() {
os.Exit(0)
}()

parts := flag.Int("part", 16, "Number of Download parts")

flag.Parse()
urls := []string{}
u := ""

if len(os.Args) <= 1 {
fmt.Printf("URL: ")
fmt.Scanf("%s\n", &u)
if u == "" {
log.Fatalln("No URL Provided")
}

download(u, *parts)
} else {

if *parts == 0 {
urls = os.Args[1:]
} else {
urls = os.Args[2:]
}

for _, v := range urls {
download(v, *parts)
}
}

}

// download parses the raw URL u, downloads it via pluto.Download using the
// given number of concurrent parts, and copies the result into a file in the
// current directory named after the URL's final path segment.
// Any failure terminates the process via log.Fatalln.
// NOTE(review): the source shown here is diff residue mixing the old and new
// versions of this function (two conflicting pluto.Download / os.Create
// calls); this body reconstructs the post-commit version.
func download(u string, parts int) {
	start := time.Now()

	up, err := url.Parse(u)
	if err != nil {
		log.Fatalln("Invalid URL")
	}

	// Derive the local filename from the last path component of the URL.
	fname := filepath.Base(up.String())

	fmt.Printf("Downloading %s\n", up.String())

	f, err := pluto.Download(up, parts)
	if err != nil {
		log.Fatalln(err)
	}
	defer f.Close()
	// The temporary file returned by pluto is no longer needed once its
	// contents have been copied to the destination file.
	defer os.Remove(f.Name())

	file, err := os.Create(fname)
	if err != nil {
		log.Fatalln(err.Error())
	}
	defer file.Close()

	if _, err = io.Copy(file, f); err != nil {
		log.Fatalln(err.Error())
	}

	// Report completion only after the copy has actually finished, so the
	// elapsed time covers the whole operation (the original printed this
	// before io.Copy ran).
	fmt.Printf("Downloaded %s in %s\n", up.String(), time.Since(start))
}

0 comments on commit 9f73c73

Please sign in to comment.