task.go

package app

import (
    "log"
    "sync"
    "time"

    "github.com/zu1k/proxypool/internal/cache"
    "github.com/zu1k/proxypool/internal/database"
    "github.com/zu1k/proxypool/pkg/provider"
    "github.com/zu1k/proxypool/pkg/proxy"
)
// location is used to stamp crawl times in China Standard Time (PRC);
// the error from LoadLocation is deliberately ignored.
var location, _ = time.LoadLocation("PRC")

// CrawlGo runs all registered getters concurrently, merges the crawled nodes
// with those already cached and stored in the database, cleans and renames
// them, then health-checks the result and refreshes the cache.
func CrawlGo() {
    wg := &sync.WaitGroup{}
    pc := make(chan proxy.Proxy)

    // Fan out: every getter pushes its nodes into the shared channel.
    for _, g := range Getters {
        wg.Add(1)
        go g.Get2Chan(pc, wg)
    }

    // Start from the nodes we already know about.
    proxies := cache.GetProxies("allproxies")
    proxies = append(proxies, database.GetAllProxies()...)

    // Close the channel once every getter has finished, so the range below terminates.
    go func() {
        wg.Wait()
        close(pc)
    }()
    for node := range pc {
        if node != nil {
            proxies = append(proxies, node)
        }
    }
    // Deduplicate nodes.
    proxies = proxies.Deduplication()
    log.Println("CrawlGo node count:", len(proxies))

    // Drop nodes that the Clash provider cannot express.
    proxies = provider.Clash{
        provider.Base{
            Proxies: &proxies,
        },
    }.CleanProxies()
    log.Println("CrawlGo cleaned node count:", len(proxies))

    // Rename nodes: add country prefixes, sort, then append indexes and the TG tag.
    proxies.NameAddCounrty().Sort().NameAddIndex().NameAddTG()
    log.Println("Proxy rename DONE!")
    // Store the full node list in the database and refresh the cached counters.
    database.SaveProxyList(proxies)
    cache.SetProxies("allproxies", proxies)
    cache.AllProxiesCount = proxies.Len()
    log.Println("AllProxiesCount:", cache.AllProxiesCount)
    cache.SSProxiesCount = proxies.TypeLen("ss")
    log.Println("SSProxiesCount:", cache.SSProxiesCount)
    cache.SSRProxiesCount = proxies.TypeLen("ssr")
    log.Println("SSRProxiesCount:", cache.SSRProxiesCount)
    cache.VmessProxiesCount = proxies.TypeLen("vmess")
    log.Println("VmessProxiesCount:", cache.VmessProxiesCount)
    cache.TrojanProxiesCount = proxies.TypeLen("trojan")
    log.Println("TrojanProxiesCount:", cache.TrojanProxiesCount)
    cache.LastCrawlTime = time.Now().In(location).Format("2006-01-02 15:04:05")
    // Health check: keep only nodes that respond, then re-index their names.
    log.Println("Now proceed proxy health check...")
    proxies = proxy.CleanBadProxiesWithGrpool(proxies)
    log.Println("CrawlGo clash usable node count:", len(proxies))
    proxies.NameReIndex()

    // Cache the usable nodes and the pre-rendered Clash / Surge provider outputs.
    cache.SetProxies("proxies", proxies)
    cache.UsefullProxiesCount = proxies.Len()
    cache.SetString("clashproxies", provider.Clash{
        provider.Base{
            Proxies: &proxies,
        },
    }.Provide())
    cache.SetString("surgeproxies", provider.Surge{
        provider.Base{
            Proxies: &proxies,
        },
    }.Provide())
}
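
For context, a minimal sketch (not part of this file or the repository) of how CrawlGo might be driven on a fixed interval. It assumes the package above is importable as github.com/zu1k/proxypool/internal/app, that Getters has already been populated while loading the configuration, and a 30-minute interval; the real project may instead schedule the task through a cron-style job.

package main

import (
    "time"

    "github.com/zu1k/proxypool/internal/app" // assumed import path for the package above
)

func main() {
    // Assumes app.Getters has already been filled in during configuration loading;
    // without getters, CrawlGo has nothing to crawl.
    for {
        app.CrawlGo()                // crawl, clean, health-check and cache proxies
        time.Sleep(30 * time.Minute) // assumed interval; a cron-style scheduler works too
    }
}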