Commit

Merge remote-tracking branch 'wiserain/mod'
Sakura-Byte committed Dec 25, 2024
2 parents cfdcd06 + 12305d0 commit 947bed3
Showing 5 changed files with 377 additions and 269 deletions.
158 changes: 127 additions & 31 deletions backend/115/115.go
@@ -25,6 +25,7 @@ import (
"io"
"net/http"
"path"
"reflect"
"regexp"
"strings"
"sync"
@@ -51,7 +52,7 @@ const (
rootURL = "https://webapi.115.com"
defaultUserAgent = "Mozilla/5.0 115Browser/27.0.7.5"

defaultMinSleep = fs.Duration(1000 * time.Millisecond) // 1 transactions per second
defaultMinSleep = fs.Duration(1032 * time.Millisecond)
maxSleep = 2 * time.Second
decayConstant = 2 // bigger for slower decay, exponential

@@ -104,6 +105,10 @@ Additionally, you can provide a comma-separated list of cookies to distribute re
across multiple client instances for load balancing.`,
Required: true,
Sensitive: true,
}, {
Name: "cookie_from",
Help: `Specify a space-separated list of remote names to read cookies from`,
Advanced: true,
}, {
Name: "share_code",
Help: "share code from share link",
@@ -255,7 +260,8 @@ type Options struct {
CID string `config:"cid"`
SEID string `config:"seid"`
KID string `config:"kid"`
Cookie string `config:"cookie"`
Cookie fs.CommaSepList `config:"cookie"`
CookieFrom fs.SpaceSepList `config:"cookie_from"`
ShareCode string `config:"share_code"`
ReceiveCode string `config:"receive_code"`
UserAgent string `config:"user_agent"`
@@ -270,7 +276,7 @@ type Options struct {
ChunkSize fs.SizeSuffix `config:"chunk_size"`
MaxUploadParts int `config:"max_upload_parts"`
UploadConcurrency int `config:"upload_concurrency"`
DownloadCookie string `config:"download_cookie"`
DownloadCookie fs.CommaSepList `config:"download_cookie"`
DownloadNoProxy bool `config:"download_no_proxy"`
Enc encoder.MultiEncoder `config:"encoding"`
}
@@ -377,14 +383,27 @@ func errorHandler(resp *http.Response) error {
return errResponse
}

// getCookies extracts UID, CID, SEID and KID from a cookie string and returns of a list of *http.Cookie
func getCookies(cookie string) (cks []*http.Cookie) {
if cookie == "" {
return
type Credential struct {
UID string
CID string
SEID string
KID string
}

// Valid reports whether the credential is valid.
func (cr *Credential) Valid() error {
if cr == nil {
return fmt.Errorf("nil credential")
}
if cr.UID == "" || cr.CID == "" || cr.SEID == "" {
return fmt.Errorf("missing UID, CID, or SEID")
}
return nil
}

items := strings.Split(cookie, ";")
for _, item := range items {
// FromCookie loads credential from cookie string
func (cr *Credential) FromCookie(cookieStr string) *Credential {
for _, item := range strings.Split(cookieStr, ";") {
kv := strings.Split(strings.TrimSpace(item), "=")
if len(kv) != 2 {
continue
@@ -393,20 +412,34 @@ func getCookies(cookie string) (cks []*http.Cookie) {
val := strings.TrimSpace(kv[1])
switch key {
case "UID", "CID", "SEID", "KID":
if val != "" {
cks = append(cks, &http.Cookie{Name: key, Value: val, Domain: domain, Path: "/", HttpOnly: true})
}
reflect.ValueOf(cr).Elem().FieldByName(key).SetString(val)
}
}
return
return cr
}

// Cookie turns the credential into a list of http cookie
func (cr *Credential) Cookie() []*http.Cookie {
return []*http.Cookie{
{Name: "UID", Value: cr.UID, Domain: domain, Path: "/", HttpOnly: true},
{Name: "CID", Value: cr.CID, Domain: domain, Path: "/", HttpOnly: true},
{Name: "KID", Value: cr.KID, Domain: domain, Path: "/", HttpOnly: true},
{Name: "SEID", Value: cr.SEID, Domain: domain, Path: "/", HttpOnly: true},
}
}

// UserID parses userID from UID field
func (cr *Credential) UserID() string {
userID, _, _ := strings.Cut(cr.UID, "_")
return userID
}

// getClient makes an http client according to the options
func getClient(ctx context.Context, opt *Options) *http.Client {
t := fshttp.NewTransportCustom(ctx, func(t *http.Transport) {
t.TLSHandshakeTimeout = time.Duration(opt.ConTimeout)
t.ResponseHeaderTimeout = time.Duration(opt.Timeout)
if opt.DownloadCookie != "" && opt.DownloadNoProxy {
if len(opt.DownloadCookie) != 0 && opt.DownloadNoProxy {
t.Proxy = nil
}
})
Expand All @@ -420,6 +453,7 @@ type poolClient struct {
clients []*rest.Client
clientMu *sync.Mutex
currentIndex int
credentials []*Credential
}

func (p *poolClient) client() *rest.Client {
@@ -445,22 +479,64 @@ func (p *poolClient) Do(req *http.Request) (*http.Response, error) {
return p.client().Do(req)
}

func newPoolClient(ctx context.Context, opt *Options, cookies string) *poolClient {
var clients []*rest.Client
for _, cookie := range strings.Split(cookies, ",") {
if cks := getCookies(cookie); len(cks) == 4 {
cli := rest.NewClient(getClient(ctx, opt)).SetRoot(rootURL).SetErrorHandler(errorHandler)
cli.SetCookie(cks...)
clients = append(clients, cli)
func newPoolClient(ctx context.Context, opt *Options, cookies fs.CommaSepList) (pc *poolClient, err error) {
// cookies -> credentials
var creds []*Credential
seen := make(map[string]bool)
for _, cookie := range cookies {
cred := (&Credential{}).FromCookie(cookie)
if err = cred.Valid(); err != nil {
return nil, fmt.Errorf("%w: %q", err, cookie)
}
if seen[cred.UID] {
return nil, fmt.Errorf("duplicate UID: %q", cookie)
}
seen[cred.UID] = true
creds = append(creds, cred)
}
if len(clients) == 0 {
return nil
if len(creds) == 0 {
return nil, nil
}
if len(creds) > 1 {
UserID := creds[0].UserID()
for _, cred := range creds[1:] {
if user := cred.UserID(); UserID != user {
return nil, fmt.Errorf("inconsistent UserID: %s != %s", UserID, user)
}
}
}

// credentials -> rest clients
var clients []*rest.Client
for _, cred := range creds {
cli := rest.NewClient(getClient(ctx, opt)).SetRoot(rootURL).SetErrorHandler(errorHandler)
clients = append(clients, cli.SetCookie(cred.Cookie()...))
}
return &poolClient{
clients: clients,
clientMu: new(sync.Mutex),
clients: clients,
clientMu: new(sync.Mutex),
credentials: creds,
}, nil
}

// getCookieFrom retrieves a cookie from `remote` configured in rclone.conf
func getCookieFrom(remote string) (cookie fs.CommaSepList, err error) {
fsInfo, _, _, config, err := fs.ConfigFs(remote)
if err != nil {
return nil, err
}
if fsInfo.Name != "115" {
return nil, fmt.Errorf("not 115 remote")
}
opt := new(Options)
err = configstruct.Set(config, opt)
if err != nil {
return nil, err
}
if len(opt.Cookie) == 0 {
return nil, fmt.Errorf("empty cookie")
}
return opt.Cookie, nil
}

// newClientWithPacer sets a new pool client with a pacer to Fs
Expand All @@ -469,19 +545,39 @@ func (f *Fs) newClientWithPacer(ctx context.Context, opt *Options) (err error) {
newCtx, ci := fs.AddConfig(ctx)
ci.UserAgent = opt.UserAgent

f.srv = newPoolClient(newCtx, opt, opt.Cookie)
var remoteCookies fs.CommaSepList
for _, remote := range opt.CookieFrom {
cookie, err := getCookieFrom(remote)
if err != nil {
return fmt.Errorf("couldn't get cookie from %q: %w", remote, err)
}
remoteCookies = append(remoteCookies, cookie...)
}
if f.srv, err = newPoolClient(newCtx, opt, remoteCookies); err != nil {
return err
}
if f.srv == nil {
// if not found from opt.Cookie
cookie := fmt.Sprintf("UID=%s;CID=%s;SEID=%s;KID=%s", opt.UID, opt.CID, opt.SEID, opt.KID)
f.srv = newPoolClient(newCtx, opt, cookie)
// if not any from opt.CookieFrom
if f.srv, err = newPoolClient(newCtx, opt, opt.Cookie); err != nil {
return err
}
}
if f.srv == nil {
// if not any from opt.Cookie
oldCookie := fmt.Sprintf("UID=%s;CID=%s;SEID=%s;KID=%s", opt.UID, opt.CID, opt.SEID, opt.KID)
if f.srv, err = newPoolClient(newCtx, opt, fs.CommaSepList{oldCookie}); err != nil {
return err
}
}
if f.srv == nil {
return fmt.Errorf("no cookies")
}

// download-only clients
f.dsrv = newPoolClient(newCtx, opt, opt.DownloadCookie)
f.userID, _, _ = strings.Cut(opt.UID, "_")
if f.dsrv, err = newPoolClient(newCtx, opt, opt.DownloadCookie); err != nil {
return err
}
f.userID = f.srv.credentials[0].UserID()
adjustedMinSleep := time.Duration(opt.PacerMinSleep)
if numClients := len(f.srv.clients); numClients > 1 {
adjustedMinSleep /= time.Duration(numClients)
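
Aside (not part of the commit): with this change a single cookie, a comma-separated list of cookies, or cookies pulled from other remotes via the new cookie_from option all end up as Credential values with one rest client per cookie, and newClientWithPacer divides the configured pacer minimum sleep (1032 ms by default) by the number of clients, presumably so each cookie still sees roughly the original per-cookie interval as requests rotate through the pool. Below is a minimal standalone sketch of the cookie-to-Credential mapping; it mirrors FromCookie and UserID from the diff without the reflection, and all cookie values are made up.

// Standalone sketch of Credential.FromCookie/UserID from the diff above.
// Simplified: explicit field assignment instead of reflection; values are fake.
package main

import (
	"fmt"
	"strings"
)

type Credential struct {
	UID, CID, SEID, KID string
}

// FromCookie fills the credential from a "k=v; k=v" cookie string,
// keeping only the UID, CID, SEID and KID keys.
func (cr *Credential) FromCookie(cookieStr string) *Credential {
	for _, item := range strings.Split(cookieStr, ";") {
		kv := strings.SplitN(strings.TrimSpace(item), "=", 2)
		if len(kv) != 2 {
			continue
		}
		key, val := strings.TrimSpace(kv[0]), strings.TrimSpace(kv[1])
		switch key {
		case "UID":
			cr.UID = val
		case "CID":
			cr.CID = val
		case "SEID":
			cr.SEID = val
		case "KID":
			cr.KID = val
		}
	}
	return cr
}

// UserID is the part of UID before the first underscore.
func (cr *Credential) UserID() string {
	userID, _, _ := strings.Cut(cr.UID, "_")
	return userID
}

func main() {
	cred := (&Credential{}).FromCookie("UID=1234567_A1_1700000000; CID=deadbeef; SEID=cafebabe; KID=feedface")
	fmt.Println(cred.UserID()) // 1234567
	fmt.Println(cred.SEID)     // cafebabe
}

Consistent with newPoolClient above, every cookie in such a pool must parse to a valid credential, duplicate UIDs are rejected, and all credentials must share the same UserID.
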
41 changes: 33 additions & 8 deletions backend/115/api/types.go
@@ -5,6 +5,7 @@ import (
"encoding/json"
"fmt"
"net/http"
"net/url"
"strconv"
"strings"
"time"
@@ -270,18 +271,42 @@ type SizeInfo struct {
// ------------------------------------------------------------

type DownloadURL struct {
URL string `json:"url"`
Client Int `json:"client"`
Desc string `json:"desc"`
OssID string `json:"oss_id"`
Cookies []*http.Cookie
CreateTime time.Time
URL string `json:"url"`
Client Int `json:"client"`
Desc string `json:"desc"`
OssID string `json:"oss_id"`
Cookies []*http.Cookie
}

// expiry parses expiry from URL parameter t
func (u *DownloadURL) expiry() time.Time {
if p, err := url.Parse(u.URL); err == nil {
if q, err := url.ParseQuery(p.RawQuery); err == nil {
if t := q.Get("t"); t != "" {
if i, err := strconv.ParseInt(t, 10, 64); err == nil {
return time.Unix(i, 0)
}
}
}
}
return time.Time{}
}

// expired reports whether the token is expired.
// u must be non-nil.
func (u *DownloadURL) expired() bool {
expiry := u.expiry()
if expiry.IsZero() {
return false
}

expiryDelta := time.Duration(10) * time.Second
return expiry.Round(0).Add(-expiryDelta).Before(time.Now())
}

// Valid reports whether u is non-nil, has an URL, and is not expired.
func (u *DownloadURL) Valid() bool {
return u != nil && u.URL != "" && time.Since(u.CreateTime) < 100*time.Second
// TODO: how sure for 100s expiry
return u != nil && u.URL != "" && !u.expired()
}

func (u *DownloadURL) Cookie() string {
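
Aside (not part of the commit): the expiry/expired helpers above read the expiry as a Unix timestamp from the URL's "t" query parameter, replacing the old fixed 100-second window measured from CreateTime. A standalone sketch of that check follows; the 10-second safety margin matches expiryDelta in the diff, while the host, path and timestamp are fabricated.

// Sketch of deriving a download URL's expiry from its "t" query parameter,
// as DownloadURL.expiry/expired above do. URL and timestamp are fabricated.
package main

import (
	"fmt"
	"net/url"
	"strconv"
	"time"
)

// expiry returns the zero time when no usable "t" parameter is present.
func expiry(rawURL string) time.Time {
	if p, err := url.Parse(rawURL); err == nil {
		if t := p.Query().Get("t"); t != "" {
			if i, err := strconv.ParseInt(t, 10, 64); err == nil {
				return time.Unix(i, 0)
			}
		}
	}
	return time.Time{}
}

// expired treats a URL with no encoded expiry as still valid and otherwise
// expires it 10 seconds early, like the diff's expiryDelta.
func expired(rawURL string) bool {
	exp := expiry(rawURL)
	if exp.IsZero() {
		return false
	}
	return exp.Add(-10 * time.Second).Before(time.Now())
}

func main() {
	u := "https://cdn.example.com/download?t=1735084800&sign=abc"
	fmt.Println(expiry(u).UTC(), expired(u))
}

This is also why helper.go below no longer stamps durl.CreateTime = time.Now(): Valid() now asks the URL itself whether it is stale.
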
5 changes: 1 addition & 4 deletions backend/115/helper.go
@@ -377,8 +377,7 @@ func (f *Fs) _getDownloadURL(ctx context.Context, input []byte) (output []byte,
if err != nil {
return nil, nil, fmt.Errorf("failed to decode data: %w", err)
}
cookies = append(cookies, resp.Cookies()...) // including uid, cid, and seid
cookies = append(cookies, resp.Request.Cookies()...) // including access key value pari with Max-Age=900
cookies = append(cookies, resp.Cookies()...) // including access key value pair with Max-Age=900
return
}

@@ -397,7 +396,6 @@ func (f *Fs) getDownloadURL(ctx context.Context, pickCode string) (durl *api.Dow
for _, downInfo := range downData {
durl = &downInfo.URL
durl.Cookies = cookies
durl.CreateTime = time.Now()
return
}
return nil, fs.ErrorObjectNotFound
@@ -683,6 +681,5 @@ func (f *Fs) getDownloadURLFromShare(ctx context.Context, fid string) (durl *api.Dow

durl = &downInfo.URL
durl.Cookies = cookies
durl.CreateTime = time.Now()
return
}
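
Aside (not part of the commit): the cookies gathered in _getDownloadURL above ride along on the DownloadURL so the eventual GET for the file can present them. A purely illustrative hand-off is sketched below; the cookie name, value and URL are placeholders, not the real 115 access-key cookie.

// Illustrative only: attach cookies returned with a download URL to the
// request that fetches the file. All names and values are placeholders.
package main

import (
	"fmt"
	"net/http"
)

func main() {
	req, err := http.NewRequest(http.MethodGet, "https://cdn.example.com/download?t=1735084800", nil)
	if err != nil {
		panic(err)
	}
	// e.g. the short-lived access-key cookie (Max-Age=900) mentioned in the diff
	cookies := []*http.Cookie{{Name: "ACCESS_KEY", Value: "placeholder"}}
	for _, ck := range cookies {
		req.AddCookie(ck)
	}
	fmt.Println(req.Header.Get("Cookie")) // ACCESS_KEY=placeholder
}
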
