Add robots.txt file to prevent crawlers
@@ -12,4 +12,5 @@ import (
 
 type ICallbackV1 interface {
 	Ip(ctx context.Context, req *v1.IpReq) (res *v1.IpRes, err error)
+	Robots(ctx context.Context, req *v1.RobotsReq) (res *v1.RobotsRes, err error)
 }
api/callback/v1/robots.go (new file, 9 lines)
@@ -0,0 +1,9 @@
+package v1
+
+import "github.com/gogf/gf/v2/frame/g"
+
+type RobotsReq struct {
+	g.Meta `path:"/robots.txt" tags:"回调响应" method:"get" summary:"禁止爬虫"`
+}
+type RobotsRes struct {
+}
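
In GoFrame, the g.Meta tag on RobotsReq (tags:"回调响应", i.e. "callback response"; summary:"禁止爬虫", i.e. "disallow crawlers") is what maps the controller method to GET /robots.txt once the controller object is bound to a router group. The sketch below shows how that binding is typically wired up; it is not part of this commit, and callback.NewV1() plus its import path are assumptions based on the standard GoFrame project layout.

// Registration sketch (not part of this commit). Assumes the standard GoFrame
// layout in which callback.NewV1() returns the controller implementing
// ICallbackV1; the import path is inferred from the api import used below.
package main

import (
	"github.com/ayflying/utility_go/controller/callback"
	"github.com/gogf/gf/v2/frame/g"
	"github.com/gogf/gf/v2/net/ghttp"
)

func main() {
	s := g.Server()
	s.Group("/", func(group *ghttp.RouterGroup) {
		// Bind inspects the g.Meta tags (path, method) of each request struct
		// and registers the matching controller methods as routes, so
		// RobotsReq becomes GET /robots.txt.
		group.Bind(callback.NewV1())
	})
	s.Run()
}
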
controller/callback/callback_v1_robots.go (new file, 14 lines)
@@ -0,0 +1,14 @@
+package callback
+
+import (
+	"context"
+
+	"github.com/ayflying/utility_go/api/callback/v1"
+	"github.com/gogf/gf/v2/frame/g"
+)
+
+func (c *ControllerV1) Robots(ctx context.Context, req *v1.RobotsReq) (res *v1.RobotsRes, err error) {
+	text := "User-agent: *\nDisallow: /"
+	g.RequestFromCtx(ctx).Response.Write(text)
+	return
+}
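
Once the route is registered, any request for /robots.txt receives the body written by the controller, which tells every crawler (User-agent: *) not to index any path (Disallow: /). Below is a minimal check, assuming the server from the sketch above is listening on localhost port 8000 (an assumption; GoFrame takes the port from its own configuration).

// Verification sketch (not part of this commit). Fetches /robots.txt and
// prints the body the Robots controller writes: "User-agent: *\nDisallow: /".
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	resp, err := http.Get("http://localhost:8000/robots.txt")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}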