change log

main
snakem982 1 year ago
parent ee8e4d0232
commit 0bcf5ffa7d

.gitignore

@@ -0,0 +1 @@
+/test/

@@ -0,0 +1,5 @@
+package config
+func init() {
+}

@@ -2,7 +2,7 @@ package spider
 import (
     "github.com/metacubex/mihomo/config"
-    "github.com/metacubex/mihomo/log"
+    "log"
     "sync"
 )
@@ -31,7 +31,7 @@ func (c *Clash) Get() []map[string]any {
 func (c *Clash) Get2ChanWG(pc chan []map[string]any, wg *sync.WaitGroup) {
     defer wg.Done()
     nodes := c.Get()
-    log.Infoln("STATISTIC: Clash count=%d url=%s", len(nodes), c.Url)
+    log.Printf("STATISTIC: Clash count=%d url=%s\n", len(nodes), c.Url)
     if len(nodes) > 0 {
         pc <- nodes
     }
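
The swap above is the pattern repeated throughout this commit: mihomo's leveled logger (log.Infoln takes a printf-style format and handles the level prefix and newline itself) is replaced by the standard library logger, with an explicit "\n" appended to each format string. A minimal side-by-side sketch, assuming mihomo's log package keeps its printf-style Infoln signature; the values are made up for illustration:

```go
package main

import (
    stdlog "log"

    mihomolog "github.com/metacubex/mihomo/log"
)

func main() {
    nodes, url := 12, "https://example.com/sub" // hypothetical values for illustration

    // Before: mihomo's leveled logger formats printf-style and adds the
    // level prefix and trailing newline on its own.
    mihomolog.Infoln("STATISTIC: Clash count=%d url=%s", nodes, url)

    // After: the standard library logger, as used in this commit. The explicit
    // "\n" is harmless; log.Printf appends a newline anyway when one is missing.
    stdlog.Printf("STATISTIC: Clash count=%d url=%s\n", nodes, url)
}
```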

@@ -3,7 +3,7 @@ package spider
 import (
     "github.com/metacubex/mihomo/common/convert"
     "github.com/metacubex/mihomo/config"
-    "github.com/metacubex/mihomo/log"
+    "log"
     "regexp"
     "strings"
     "sync"
@@ -26,7 +26,7 @@ func (c *Fuzzy) Get() []map[string]any {
 func (c *Fuzzy) Get2ChanWG(pc chan []map[string]any, wg *sync.WaitGroup) {
     defer wg.Done()
     nodes := c.Get()
-    log.Infoln("STATISTIC: Fuzzy count=%d url=%s", len(nodes), c.Url)
+    log.Printf("STATISTIC: Fuzzy count=%d url=%s\n", len(nodes), c.Url)
     if len(nodes) > 0 {
         pc <- nodes
     }
@@ -78,7 +78,7 @@ func ComputeFuzzy(content []byte) []map[string]any {
         defer func() {
             // use recover inside each goroutine to catch panics that may occur in the invoked logic
             if e := recover(); e != nil {
-                log.Errorln("====抓取错误====%s", e)
+                log.Printf("====抓取错误====%s\n", e)
             }
             done <- struct{}{}
         }()
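
The ComputeFuzzy hunk shows the crawler's worker idiom: each goroutine defers a recover so a single malformed source cannot take the process down, then signals completion on a done channel. A self-contained sketch of that idiom, using a hypothetical submitWithTimeout helper in place of the repo's pool.SubmitWithTimeout (whose real implementation is not part of this diff):

```go
package main

import (
    "log"
    "time"
)

// submitWithTimeout is a hypothetical stand-in for the repo's pool.SubmitWithTimeout
// helper (its real signature is not shown in this diff): run fn in a goroutine and
// wait for either its done signal or a timeout.
func submitWithTimeout(fn func(done chan struct{}), timeout time.Duration) {
    done := make(chan struct{}, 1) // buffered so the worker never blocks on send
    go fn(done)
    select {
    case <-done:
    case <-time.After(timeout):
        log.Printf("worker timed out after %s\n", timeout)
    }
}

func main() {
    submitWithTimeout(func(done chan struct{}) {
        defer func() {
            // recover inside the goroutine so one bad payload cannot crash the crawler
            if e := recover(); e != nil {
                log.Printf("====crawl error====%v\n", e)
            }
            done <- struct{}{}
        }()
        panic("simulated parse failure")
    }, 2*time.Second)
}
```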

@@ -2,7 +2,7 @@ package spider
 import (
     "github.com/metacubex/mihomo/common/convert"
-    "github.com/metacubex/mihomo/log"
+    "log"
     "regexp"
     "strings"
     "sync"
@@ -39,7 +39,7 @@ func (c *ShareLink) Get() []map[string]any {
 func (c *ShareLink) Get2ChanWG(pc chan []map[string]any, wg *sync.WaitGroup) {
     defer wg.Done()
     nodes := c.Get()
-    log.Infoln("STATISTIC: ShareLink count=%d url=%s", len(nodes), c.Url)
+    log.Printf("STATISTIC: ShareLink count=%d url=%s\n", len(nodes), c.Url)
     if len(nodes) > 0 {
         pc <- nodes
     }
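
For context on what the ShareLink spider feeds into the channel: mihomo's convert package can turn raw share links (or a base64 subscription blob) into Clash-style proxy maps. A minimal sketch, assuming convert.ConvertsV2Ray keeps its ([]byte) ([]map[string]any, error) signature; the links below are fake placeholders, not working nodes:

```go
package main

import (
    "log"

    "github.com/metacubex/mihomo/common/convert"
)

func main() {
    // Fake share links standing in for whatever the spider's regexp extracts from a page.
    raw := []byte("trojan://password@example.com:443#demo\n" +
        "ss://YWVzLTEyOC1nY206dGVzdA==@example.com:8388#demo")

    nodes, err := convert.ConvertsV2Ray(raw)
    if err != nil {
        log.Printf("convert failed: %v\n", err)
        return
    }
    for _, n := range nodes {
        log.Printf("parsed node name=%v type=%v\n", n["name"], n["type"])
    }
}
```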

@@ -11,9 +11,9 @@ import (
     "github.com/metacubex/mihomo/common/utils"
     "github.com/metacubex/mihomo/config"
     C "github.com/metacubex/mihomo/constant"
-    "github.com/metacubex/mihomo/log"
     "github.com/snakem982/proxypool/tools"
     "gopkg.in/yaml.v3"
+    "log"
     "net"
     "os"
     "path/filepath"
@@ -78,7 +78,7 @@ func Crawl() bool {
         rawCfg, err := config.UnmarshalRawConfig(defaultBuf)
         if err == nil && len(rawCfg.Proxy) > 0 {
             proxies = rawCfg.Proxy
-            log.Infoln("load default config proxies success %d", len(rawCfg.Proxy))
+            log.Printf("load default config proxies success %d\n", len(rawCfg.Proxy))
         }
     }
@@ -245,7 +245,7 @@ func map2proxies(maps map[string]map[string]any) (proxies []C.Proxy) {
     pool.SubmitWithTimeout(func(done chan struct{}) {
         defer func() {
             if e := recover(); e != nil {
-                log.Errorln("===map2proxies===%s", e)
+                log.Printf("===map2proxies===%s\n", e)
             }
             done <- struct{}{}
         }()
@@ -276,7 +276,7 @@ func urlTest(proxies []C.Proxy) []string {
     pool.SubmitWithTimeout(func(done chan struct{}) {
         defer func() {
             if e := recover(); e != nil {
-                log.Errorln("===urlTest===%s", e)
+                log.Printf("===urlTest===%s\n", e)
             }
             done <- struct{}{}
         }()
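
The Crawl hunk relies on mihomo's config.UnmarshalRawConfig to pull the proxies list out of a Clash-style YAML document before any further processing. A minimal sketch of that step, using an inline document instead of the fetched default config (the YAML below is an assumption for illustration only):

```go
package main

import (
    "log"

    "github.com/metacubex/mihomo/config"
)

func main() {
    // Inline stand-in for the default config bytes that Crawl() reads before this hunk.
    defaultBuf := []byte(`
proxies:
  - {name: demo, type: ss, server: example.com, port: 8388, cipher: aes-128-gcm, password: test}
`)

    rawCfg, err := config.UnmarshalRawConfig(defaultBuf)
    if err == nil && len(rawCfg.Proxy) > 0 {
        // same guard as the hunk above: only accept documents that actually carry proxies
        log.Printf("load default config proxies success %d\n", len(rawCfg.Proxy))
    }
}
```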

@@ -2,7 +2,7 @@ package spider
 import (
     "github.com/metacubex/mihomo/common/convert"
-    "github.com/metacubex/mihomo/log"
+    "log"
     "sync"
 )
@@ -31,7 +31,7 @@ func (c *V2ray) Get() []map[string]any {
 func (c *V2ray) Get2ChanWG(pc chan []map[string]any, wg *sync.WaitGroup) {
     defer wg.Done()
     nodes := c.Get()
-    log.Infoln("STATISTIC: V2ray count=%d url=%s", len(nodes), c.Url)
+    log.Printf("STATISTIC: V2ray count=%d url=%s\n", len(nodes), c.Url)
     if len(nodes) > 0 {
         pc <- nodes
     }
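
All four spiders touched by this commit (Clash, Fuzzy, ShareLink, V2ray) expose the same Get2ChanWG(pc, wg) shape: do the work, log the count, push at most one batch into the channel, and mark the WaitGroup done via defer. A sketch of how such collectors are typically fanned in on the consuming side; the collector interface and the buffering choice are assumptions, not code from this diff:

```go
package main

import (
    "log"
    "sync"
)

// collector matches the Get2ChanWG shape shared by the Clash, Fuzzy, ShareLink and
// V2ray spiders in this commit; the interface itself is an assumption for this sketch.
type collector interface {
    Get2ChanWG(pc chan []map[string]any, wg *sync.WaitGroup)
}

// collect fans in the batches produced by a set of spiders.
func collect(spiders []collector) []map[string]any {
    pc := make(chan []map[string]any, len(spiders)) // each spider sends at most one batch
    var wg sync.WaitGroup
    for _, s := range spiders {
        wg.Add(1)
        go s.Get2ChanWG(pc, &wg) // Get2ChanWG defers wg.Done() itself
    }
    wg.Wait()
    close(pc) // safe: every sender has finished
    var all []map[string]any
    for batch := range pc {
        all = append(all, batch...)
    }
    log.Printf("collected %d nodes from %d spiders\n", len(all), len(spiders))
    return all
}

func main() {
    _ = collect(nil) // wire in real spiders here
}
```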

@@ -4,9 +4,9 @@ import (
     "crypto/tls"
     "fmt"
     C "github.com/metacubex/mihomo/constant"
-    "github.com/metacubex/mihomo/log"
     "golang.org/x/net/context"
     "io"
+    "log"
     "net"
     "net/http"
     "net/url"
@@ -47,7 +47,7 @@ func HttpGetByProxy(requestUrl, httpProxyUrl string) ([]byte, error) {
     // create a GET request
     req, err := http.NewRequest(http.MethodGet, requestUrl, nil)
     if err != nil {
-        log.Warnln("HttpGetByProxy http.NewRequest %s %v", requestUrl, err)
+        log.Printf("HttpGetByProxy http.NewRequest %s %v\n", requestUrl, err)
         return nil, err
     }
     req.Header.Set("Accept-Encoding", "utf-8")
@@ -57,7 +57,7 @@ func HttpGetByProxy(requestUrl, httpProxyUrl string) ([]byte, error) {
     // send the request and read the response
     resp, err := client.Do(req)
     if err != nil {
-        log.Warnln("HttpGetByProxy client.Do %s %v", requestUrl, err)
+        log.Printf("HttpGetByProxy client.Do %s %v\n", requestUrl, err)
         return nil, err
     }
     defer func(Body io.ReadCloser) {
@@ -69,12 +69,12 @@ func HttpGetByProxy(requestUrl, httpProxyUrl string) ([]byte, error) {
     // read the response data
     data, err := io.ReadAll(resp.Body)
     if err != nil {
-        log.Warnln("HttpGetByProxy io.ReadAll %s %v", requestUrl, err)
+        log.Printf("HttpGetByProxy io.ReadAll %s %v\n", requestUrl, err)
         return nil, err
     }
     if resp.StatusCode != http.StatusOK {
-        log.Warnln("HttpGetByProxy StatusCode %s %d", requestUrl, resp.StatusCode)
+        log.Printf("HttpGetByProxy StatusCode %s %d\n", requestUrl, resp.StatusCode)
         return nil, fmt.Errorf("StatusCode %d", resp.StatusCode)
     }
@@ -83,7 +83,7 @@ func HttpGetByProxy(requestUrl, httpProxyUrl string) ([]byte, error) {
 // HttpGet sends an HTTP GET request to the specified URL and returns the response data and any error.
 func HttpGet(requestUrl string) ([]byte, error) {
-    timeOut := 20 * time.Second
+    timeOut := 15 * time.Second
     return HttpGetWithTimeout(requestUrl, timeOut, true)
 }
@@ -102,7 +102,7 @@ func HttpGetWithTimeout(requestUrl string, outTime time.Duration, needDail bool)
     req, err := http.NewRequest(http.MethodGet, requestUrl, nil) // create a new GET request
     if err != nil {
-        log.Warnln("HttpGetWithTimeout http.NewRequest %s %v", requestUrl, err)
+        log.Printf("HttpGetWithTimeout http.NewRequest %s %v\n", requestUrl, err)
         return nil, err
     }
     req.Header.Set("Accept-Encoding", "utf-8") // set the response content encoding to utf-8
@@ -111,7 +111,7 @@ func HttpGetWithTimeout(requestUrl string, outTime time.Duration, needDail bool)
     resp, err := client.Do(req) // send the request and read the response
     if err != nil {
-        log.Warnln("HttpGetWithTimeout client.Do %s %v", requestUrl, err)
+        log.Printf("HttpGetWithTimeout client.Do %s %v\n", requestUrl, err)
         return nil, err
     }
     defer func(Body io.ReadCloser) {
@@ -122,12 +122,12 @@ func HttpGetWithTimeout(requestUrl string, outTime time.Duration, needDail bool)
     }(resp.Body)
     data, err := io.ReadAll(resp.Body) // read the response body
     if err != nil {
-        log.Warnln("HttpGetWithTimeout io.ReadAll %s %v", requestUrl, err)
+        log.Printf("HttpGetWithTimeout io.ReadAll %s %v\n", requestUrl, err)
         return nil, err
     }
     if resp.StatusCode != http.StatusOK {
-        log.Warnln("HttpGetWithTimeout StatusCode %s %d", requestUrl, resp.StatusCode)
+        log.Printf("HttpGetWithTimeout StatusCode %s %d\n", requestUrl, resp.StatusCode)
         return nil, fmt.Errorf("StatusCode %d", resp.StatusCode)
     }
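
Beyond the logger swap, this file also drops HttpGet's default timeout from 20s to 15s. A pared-down sketch of the HttpGetByProxy idea using only the standard library: route the request through an HTTP proxy and bound the whole call with a client timeout. The repo's TLS and dialing options are not shown in this diff, so they are omitted here:

```go
package main

import (
    "fmt"
    "io"
    "log"
    "net/http"
    "net/url"
    "time"
)

// httpGetByProxy is a simplified stand-in for the repo's HttpGetByProxy helper.
func httpGetByProxy(requestUrl, httpProxyUrl string) ([]byte, error) {
    proxyURL, err := url.Parse(httpProxyUrl)
    if err != nil {
        return nil, err
    }
    client := &http.Client{
        Timeout:   15 * time.Second, // matches the new default chosen for HttpGet in this commit
        Transport: &http.Transport{Proxy: http.ProxyURL(proxyURL)},
    }
    req, err := http.NewRequest(http.MethodGet, requestUrl, nil)
    if err != nil {
        log.Printf("httpGetByProxy http.NewRequest %s %v\n", requestUrl, err)
        return nil, err
    }
    resp, err := client.Do(req)
    if err != nil {
        log.Printf("httpGetByProxy client.Do %s %v\n", requestUrl, err)
        return nil, err
    }
    defer resp.Body.Close()
    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("StatusCode %d", resp.StatusCode)
    }
    return io.ReadAll(resp.Body)
}

func main() {
    // Example: fetch a page through a local proxy; both URLs are placeholders.
    if data, err := httpGetByProxy("https://example.com", "http://127.0.0.1:7890"); err == nil {
        log.Printf("got %d bytes\n", len(data))
    }
}
```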

@@ -1,7 +1,6 @@
 package tools
 import (
-    "github.com/metacubex/mihomo/log"
     "sync"
     "time"
 )
@@ -59,8 +58,7 @@ func (s *snowflake) nextVal() int64 {
     t := now - epoch
     if t > timestampMax {
         s.Unlock()
-        log.Fatalln("epoch must be between 0 and %d", timestampMax-1)
-        return 0
+        return epoch
     }
     s.timestamp = now
     r := (t)<<timestampShift | (s.datacenterid << datacenteridShift) | (s.workerid << workeridShift) | (s.sequence)
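
The snowflake change replaces a fatal log with an early return of epoch when the timestamp offset t overflows timestampMax, so an out-of-range clock now yields a sentinel value instead of killing the process. The final line packs the ID by OR-ing shifted fields; a sketch of that layout with conventional snowflake widths (the repo's actual bit constants are not shown in this diff, so the values below are assumptions):

```go
package main

import "fmt"

// Conventional snowflake layout: 41 bits of millisecond offset, 5 bits datacenter id,
// 5 bits worker id, 12 bits sequence. The repo's actual constants are not shown in this
// diff, so these widths are assumptions used only to illustrate how nextVal's final
// expression packs the fields.
const (
    sequenceBits     = 12
    workeridBits     = 5
    datacenteridBits = 5

    workeridShift     = sequenceBits
    datacenteridShift = sequenceBits + workeridBits
    timestampShift    = sequenceBits + workeridBits + datacenteridBits

    timestampMax = int64(1)<<41 - 1
)

// compose mirrors the shape of nextVal's result:
// r := (t)<<timestampShift | (datacenterid << datacenteridShift) | (workerid << workeridShift) | sequence
func compose(t, datacenterid, workerid, sequence int64) int64 {
    return t<<timestampShift | datacenterid<<datacenteridShift | workerid<<workeridShift | sequence
}

func main() {
    id := compose(1_000_000, 1, 3, 7)
    fmt.Printf("id=%d (timestampMax=%d)\n", id, timestampMax)
}
```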
