-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathmain.go
129 lines (100 loc) · 2.42 KB
/
main.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
// Package main is the CLI wrapper for the crawler
package main
import (
"fmt"
"os"
"github.com/go-errors/errors"
"github.com/spf13/cobra"
"github.com/markelog/map/io"
"github.com/markelog/map/print"
"github.com/markelog/map/reporters"
"github.com/markelog/map/spider"
)
// CLI flag values, bound to the command's persistent flags in init.
var (
	// reporter is the name of the reporter used to serialize results.
	reporter string

	// out is the path of the file the serialized data is written to;
	// empty means print to stdout.
	out string

	// domains lists additional domains to follow, comma-delimited.
	domains string
)
// example is the usage text shown in the command's help output;
// it demonstrates the reporter, out and domains flags.
const example = `
Create map and output it to the terminal
$ map http://example.com
Create map and output map in yaml form
$ map http://example.com --reporter=yaml
Pipe it
$ map http://example.com -r yaml > example.com.yaml
Or use "out" flag to pipe (so you can see the spinner comparing with previous command :)
$ map http://example.com -r yaml --out=./example.com.yaml
With additional domains
$ map https://example.com --domains=www.google.ru,www.google.com
`
// Command is the root cobra command for the "map" CLI;
// its flags are registered in init and its work is done by Run.
var Command = &cobra.Command{
Use: "map https://example.com",
Short: "Site mapper",
Example: example,
Run: Run,
}
// Run executes the command: it validates the target URL and the chosen
// reporter, crawls the site, serializes the result and either prints it
// to stdout or writes it to the file given by the "out" flag, then exits
// with the code determined by the crawl.
//
// NOTE(review): print.Error presumably no-ops on a nil error and
// reports/exits otherwise — confirm against the print package.
func Run(cmd *cobra.Command, args []string) {
	// A target URL is required as the first positional argument.
	if len(args) == 0 {
		print.Error(errors.New("Target is not specified"), 2)
		return
	}

	// Reporter has to exist.
	if !reporters.Exist(reporter) {
		err := errors.New(`Reporter "` + reporter + `" does not exist`)
		print.Error(err, 2)
		return
	}

	crawler := spider.New(args[0], domains)

	// Validate the input.
	print.Error(crawler.Validate(), 2)

	// Crawl the site, show the spinner and determine the exit code.
	exitCode := print.Spin(crawler.Crawl())

	// Get the result and send it to the reporter.
	data, err := crawler.Get()
	print.Error(err, 1)

	serialized, err := reporters.Execute(reporter, data)
	print.Error(err, 1)

	if len(serialized) > 0 {
		// Either print to the console or save it to a file.
		if len(out) == 0 {
			fmt.Println(serialized)
		} else {
			print.Error(io.WriteFile(out, serialized), 1)
		}
	}

	os.Exit(exitCode)
}
// init registers the persistent CLI flags (reporter, out, domains)
// on the root command.
func init() {
	cobra.OnInitialize()

	flags := Command.PersistentFlags()

	flags.StringVarP(
		&reporter,
		"reporter",
		"r",
		"json",
		"Show data in certain representation",
	)

	flags.StringVarP(
		&out,
		"out",
		"o",
		"",
		"Output data to the file without pipe but with the spinner :)",
	)

	// Fix: help text previously misspelled "delimiter" as "delimter".
	flags.StringVarP(
		&domains,
		"domains",
		"d",
		"",
		"Domains to follow (as addition to the base url), comma as a delimiter",
	)
}
// main runs the root command and exits non-zero when it fails.
func main() {
	// Execute's error return was previously ignored; cobra already
	// prints the error, so we only reflect it in the exit code.
	if err := Command.Execute(); err != nil {
		os.Exit(1)
	}
}