增加robots.txt文件预防爬虫

This commit is contained in:
ayflying
2025-12-08 10:36:29 +08:00
parent 9fb0513703
commit 6b304efc0e
3 changed files with 24 additions and 0 deletions

View File

@@ -12,4 +12,5 @@ import (
type ICallbackV1 interface {
	Ip(ctx context.Context, req *v1.IpReq) (res *v1.IpRes, err error)
	Robots(ctx context.Context, req *v1.RobotsReq) (res *v1.RobotsRes, err error)
}

View File

@@ -0,0 +1,9 @@
package v1
import "github.com/gogf/gf/v2/frame/g"
type RobotsReq struct {
g.Meta `path:"/robots.txt" tags:"回调响应" method:"get" summary:"禁止爬虫"`
}
type RobotsRes struct {
}

View File

@@ -0,0 +1,14 @@
package callback
import (
"context"
"github.com/ayflying/utility_go/api/callback/v1"
"github.com/gogf/gf/v2/frame/g"
)
func (c *ControllerV1) Robots(ctx context.Context, req *v1.RobotsReq) (res *v1.RobotsRes, err error) {
text := "User-agent: *\nDisallow: /"
g.RequestFromCtx(ctx).Response.Write(text)
return
}