// task.go
  1. package app
  2. import (
  3. "log"
  4. "sync"
  5. "time"
  6. "github.com/zu1k/proxypool/internal/cache"
  7. "github.com/zu1k/proxypool/pkg/provider"
  8. "github.com/zu1k/proxypool/pkg/proxy"
  9. )
  10. var location, _ = time.LoadLocation("PRC")
  11. func CrawlGo() {
  12. wg := &sync.WaitGroup{}
  13. var pc = make(chan proxy.Proxy)
  14. for _, g := range Getters {
  15. wg.Add(1)
  16. go g.Get2Chan(pc, wg)
  17. }
  18. proxies := cache.GetProxies("proxies")
  19. go func() {
  20. wg.Wait()
  21. close(pc)
  22. }()
  23. for node := range pc {
  24. if node != nil {
  25. proxies = append(proxies, node)
  26. }
  27. }
  28. // 节点去重
  29. proxies = proxies.Deduplication()
  30. log.Println("CrawlGo node count:", len(proxies))
  31. proxies = provider.Clash{Proxies: proxies}.CleanProxies()
  32. proxies.NameAddCounrty().Sort().NameAddIndex()
  33. cache.SetProxies("allproxies", proxies)
  34. cache.GettersCount = len(Getters)
  35. cache.AllProxiesCount = proxies.Len()
  36. cache.SSProxiesCount = proxies.TypeLen("ss")
  37. cache.SSRProxiesCount = proxies.TypeLen("ssr")
  38. cache.VmessProxiesCount = proxies.TypeLen("vmess")
  39. cache.TrojanProxiesCount = proxies.TypeLen("trojan")
  40. cache.LastCrawlTime = time.Now().In(location).Format("2006-01-02 15:04:05")
  41. // 可用性检测
  42. proxies = proxy.CleanBadProxies(proxies)
  43. log.Println("CrawlGo clash useable node count:", len(proxies))
  44. proxies.NameAddCounrty().Sort().NameAddIndex()
  45. cache.SetProxies("proxies", proxies)
  46. cache.UsefullProxiesCount = proxies.Len()
  47. cache.SetString("clashproxies", provider.Clash{Proxies: proxies}.Provide())
  48. cache.SetString("surgeproxies", provider.Surge{Proxies: proxies}.Provide())
  49. }