Commit ef886f7: add browserPath flag
1 parent: 5d07c85

3 files changed: +32 additions, -15 deletions

cmd/root.go

Lines changed: 4 additions & 2 deletions
@@ -21,14 +21,16 @@ var (
 	threads int
 	configPath string
 	outputFile string
+	browserPath string
 )
 
 func init() {
 	rootCmd.Flags().StringVarP(&singleURL, "url", "u", "", "Single target URL to scan (e.g. https://example.com)")
 	rootCmd.Flags().StringVarP(&listFile, "list", "l", "", "File containing target URLs (one per line)")
 	rootCmd.Flags().IntVarP(&threads, "threads", "t", 20, "Number of concurrent threads for scanning")
-	rootCmd.Flags().StringVar(&configPath, "config", "config/config.yaml", "Path to config file (e.g. config.yaml)")
+	rootCmd.Flags().StringVarP(&configPath, "config", "c", "config/config.yaml", "Path to config file (e.g. config.yaml)")
 	rootCmd.Flags().StringVarP(&outputFile, "output", "o", "", "Output file (supports .txt, .csv, .json)")
+	rootCmd.Flags().StringVarP(&browserPath, "browser", "b", "", "Path to Chrome/Chromium executable (optional). If not set, will use Rod's default.")
 }
 
 var rootCmd = &cobra.Command{
@@ -57,7 +59,7 @@ var rootCmd = &cobra.Command{
 	var toParse []string // all captured links go into toParse
 
 	// 2.1 After loading a target URL in the headless browser, collect every other link it loads by default (js, etc.) and add them to toParse
-	err := crawler.CollectLinks(urls, threads, uniqueLinks, &toParse)
+	err := crawler.CollectLinks(urls, threads, uniqueLinks, &toParse, browserPath)
 	if err != nil {
 		log.Fatalf("[!] Error collecting links: %v", err)
 	}
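
With this change the headless-browser binary can be pinned from the command line via -b/--browser. A hypothetical invocation (the SecureJS binary name and the Chromium path are assumptions for illustration, not taken from this commit) could look like:

    ./SecureJS -u https://example.com -t 20 -b /usr/bin/chromium -o results.json

If -b is omitted, Rod falls back to its default browser resolution, as shown in the crawler changes below.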

internal/crawler/crawler.go

Lines changed: 11 additions & 7 deletions
@@ -1,7 +1,7 @@
 package crawler
 
 import (
-	"SecureJS/internal/utils" // package path inside your own project; change it if yours differs
+	"SecureJS/internal/utils"
 	"fmt"
 	"log"
 	"strings"
@@ -12,8 +12,6 @@ import (
 	"github.com/go-rod/rod/lib/launcher"
 	"github.com/go-rod/rod/lib/proto"
 
-	// make sure the version in use is one that includes `Inject`
-	//"github.com/go-rod/stealth"
 )
 
 type CrawlResult struct {
@@ -25,15 +23,21 @@ type CrawlResult struct {
 // -----------------------------------------------------------
 // Crawl multiple links concurrently
 // -----------------------------------------------------------
-func crawlAll(urls []string, concurrency int) ([]*CrawlResult, error) {
+func crawlAll(urls []string, concurrency int, browserPath string) ([]*CrawlResult, error) {
 	if len(urls) == 0 {
 		return nil, fmt.Errorf("no URLs provided")
 	}
 	if concurrency <= 0 {
 		concurrency = 1
 	}
 
-	chromePath := launcher.NewBrowser().MustGet()
+	var chromePath string
+	if browserPath != "" {
+		chromePath = browserPath
+	} else {
+		chromePath = launcher.NewBrowser().MustGet()
+	}
+
 	u := launcher.New().
 		Bin(chromePath).
 		Headless(true). // can be set to false when debugging
@@ -181,8 +185,8 @@ func tryFetchOneURL(browser *rod.Browser, url string, timeout time.Duration) (*C
 // -----------------------------------------------------------
 // External interface, used for collecting links
 // -----------------------------------------------------------
-func CollectLinks(urls []string, threads int, uniqueLinks map[string]struct{}, toParse *[]string) error {
-	results, err := crawlAll(urls, threads)
+func CollectLinks(urls []string, threads int, uniqueLinks map[string]struct{}, toParse *[]string, browserPath string) error {
+	results, err := crawlAll(urls, threads, browserPath)
 	if err != nil {
 		return fmt.Errorf("failed to crawl: %v", err)
 	}
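
The core of this change is the fallback for chromePath: an explicit --browser value wins, otherwise Rod resolves (or downloads) its default browser. Below is a minimal standalone sketch of the same pattern factored into a helper; the name resolveChromePath is hypothetical and not part of this commit, and the go-rod import path used here is the upstream one, which may differ from the mirrored path shown in the diff.

package main

import (
	"fmt"

	"github.com/go-rod/rod/lib/launcher"
)

// resolveChromePath mirrors the fallback introduced in crawlAll: a user-supplied
// browser path takes precedence; otherwise Rod locates (or downloads) its default
// browser and returns the path to its executable.
func resolveChromePath(browserPath string) string {
	if browserPath != "" {
		return browserPath
	}
	return launcher.NewBrowser().MustGet()
}

func main() {
	// With an empty argument this prints whichever browser Rod resolved to.
	fmt.Println(resolveChromePath(""))
}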

main.go

Lines changed: 17 additions & 6 deletions
@@ -1,12 +1,23 @@
 package main
 
 import (
-	"SecureJS/cmd"
-	"log"
+	"fmt"
+	"os"
+	"runtime/debug"
+
+	"SecureJS/cmd"
 )
 
 func main() {
-	if err := cmd.Execute(); err != nil {
-		log.Fatal(err)
-	}
-}
+	debug.SetTraceback("none")
+
+	// 2) Intercept panics at the top level
+	defer func() {
+		if r := recover(); r != nil {
+			fmt.Fprintf(os.Stderr, "[!] An error occurred: %v\n", r)
+			os.Exit(1)
+		}
+	}()
+
+	cmd.Execute()
+}
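
Note that main now calls cmd.Execute() without checking a returned error, so the deferred recover only fires if Execute (or anything it calls) panics. The sketch below shows one shape of Execute that is consistent with this pattern; it is an assumption for illustration, since the real cmd.Execute is not part of this commit (it relies on the rootCmd declared in cmd/root.go above).

package cmd

// Hypothetical sketch, assuming Execute converts a Cobra error into a panic so
// that the deferred recover() in main prints "[!] An error occurred: ..." and
// exits with status 1.
func Execute() {
	if err := rootCmd.Execute(); err != nil {
		panic(err)
	}
}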
