Merge remote-tracking branch 'wiserain/mod'
Sakura-Byte committed Feb 1, 2025
2 parents 17fc232 + 1c4c260 commit 1255be5
Showing 163 changed files with 13,889 additions and 1,790 deletions.
30 changes: 15 additions & 15 deletions .github/workflows/build.yml
@@ -33,7 +33,7 @@ jobs:
         uses: ncipollo/release-action@v1
         with:
           tag: ${{ github.ref }}
-          name: rclone mod
+          name: rclone ${{ github.ref_name }}
           body: |
             ## What's Changed
@@ -44,28 +44,16 @@ jobs:
           draft: true
           prerelease: false
-      -
-        name: Create Beta
-        if: ${{ ! contains(github.ref, 'refs/tags/') }}
-        id: new_beta
-        uses: ncipollo/release-action@v1
-        with:
-          tag: ${{ github.ref }}
-          name: rclone mod - beta
-          draft: true
-          prerelease: false
-      -
-        name: Set Upload URL
-        id: set_upload_url
-        run: |
-          if [ "${{ contains(github.ref, 'refs/tags/') }}" = "true" ]; then
-            echo "upload_url=${{ steps.new_release.outputs.upload_url }}" >> "$GITHUB_OUTPUT"
-          else
-            echo "upload_url=${{ steps.new_beta.outputs.upload_url }}" >> "$GITHUB_OUTPUT"
-          fi
   build:
     needs:
       - create-release
-    if: ${{ github.event.inputs.manual == 'true' || (github.repository == 'Sakura-Byte/rclone' && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name)) }}
+    if: ${{ inputs.manual || (github.repository == 'Sakura-Byte/rclone' && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name)) }}
     timeout-minutes: 60
     strategy:
       fail-fast: false
@@ -260,14 +248,25 @@ jobs:
         # Deploy binaries if enabled in config && not a PR && not a fork
         if: matrix.deploy && github.head_ref == '' && github.repository == 'Sakura-Byte/rclone'
       -
+        if: ${{ contains(github.ref, 'refs/tags/') }}
         name: Upload Assets to Release
         uses: shogo82148/actions-upload-release-asset@v1
         with:
           upload_url: ${{ needs.create-release.outputs.upload_url }}
           asset_path: 'build/*'
+      -
+        if: ${{ ! contains(github.ref, 'refs/tags/') }}
+        name: Upload Artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: ${{ matrix.job_name }}
+          if-no-files-found: ignore
+          path: |
+            build/*amd64.zip
+            build/*arm64.zip
   lint:
-    if: ${{ github.event.inputs.manual == 'true' || (github.repository == 'Sakura-Byte/rclone' && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name)) }}
+    if: ${{ inputs.manual || (github.repository == 'Sakura-Byte/rclone' && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name)) }}
     timeout-minutes: 30
     name: "lint"
     runs-on: ubuntu-latest
@@ -511,6 +510,7 @@ jobs:
           cd ../output
           for file in *.deb; do mvto=$(sed -r "s/^rclone_(.*)_(.*)$/rclone-${{ env.version_all }}-termux-\2/g" <<< ${file}); mv "$file" "$mvto"; done
       -
+        if: ${{ contains(github.ref, 'refs/tags/') }}
         name: Upload Assets to Release
         shell: bash
         run: |
2,231 changes: 1,851 additions & 380 deletions MANUAL.html

Large diffs are not rendered by default.

2,274 changes: 2,169 additions & 105 deletions MANUAL.md

Large diffs are not rendered by default.

2,282 changes: 2,180 additions & 102 deletions MANUAL.txt

Large diffs are not rendered by default.

10 changes: 10 additions & 0 deletions RELEASE.md
@@ -86,6 +86,16 @@ build.
 Once it compiles locally, push it on a test branch and commit fixes
 until the tests pass.
 
+### Major versions
+
+The above procedure will not upgrade major versions, e.g. from v2 to v3.
+However, this tool can show which major versions might need to be
+upgraded:
+
+    go run github.com/icholy/gomajor@latest list -major
+
+Expect API breakage when updating major versions.
+
 ## Tidy beta
 
 At some point after the release run
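The gomajor invocation above only lists outdated major versions; per the tool's README, its get subcommand performs the actual upgrade by rewriting a module's import paths. A sketch with a hypothetical module path:

    go run github.com/icholy/gomajor@latest get example.com/somemod@latest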
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
-v1.69.0-219
+v1.69.0
80 changes: 76 additions & 4 deletions backend/115/115.go
@@ -20,10 +20,12 @@ package _115
 import (
     "bytes"
     "context"
+    "encoding/json"
     "errors"
     "fmt"
     "io"
     "net/http"
+    "net/url"
     "path"
     "reflect"
     "regexp"
@@ -33,6 +35,7 @@ import (

"github.com/cenkalti/backoff/v4"
"github.com/rclone/rclone/backend/115/api"
"github.com/rclone/rclone/backend/115/crypto"
"github.com/rclone/rclone/backend/115/dircache"
"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/fs/config"
@@ -61,10 +64,13 @@ const (

     maxUploadSize  = 115 * fs.Gibi // 115 GiB from https://proapi.115.com/app/uploadinfo
     maxUploadParts = 10000         // Part number must be an integer between 1 and 10000, inclusive.
-    minChunkSize     = 5 * fs.Mebi // Part size should be in [100KB, 5GB]
+    defaultChunkSize = 5 * fs.Mebi // Part size should be in [100KB, 5GB]
+    minChunkSize     = 100 * fs.Kibi
+    maxChunkSize     = 100 * fs.Gibi
     defaultUploadCutoff = 200 * fs.Mebi
     defaultNohashSize   = 100 * fs.Mebi
     StreamUploadLimit   = 5 * fs.Gibi
+    maxUploadCutoff     = 20 * fs.Gibi // maximum allowed size for singlepart uploads
 )
 
 // Register with Fs
@@ -216,7 +222,7 @@ it's buffered by the OSS SDK, when in fact it may still be uploading.
 A bigger chunk size means a bigger OSS SDK buffer and progress
 reporting more deviating from the truth.
 `,
-            Default:  minChunkSize,
+            Default:  defaultChunkSize,
             Advanced: true,
         }, {
             Name: "max_upload_parts",
@@ -490,6 +496,40 @@ func (p *poolClient) CallJSON(ctx context.Context, opts *rest.Opts, request interface{}, response interface{}) (resp *http.Response, err error) {
     return p.client().CallJSON(ctx, opts, request, response)
 }
 
+func (p *poolClient) CallDATA(ctx context.Context, opts *rest.Opts, request interface{}, response interface{}) (resp *http.Response, err error) {
+    // Encode request data
+    input, err := json.Marshal(request)
+    if err != nil {
+        return nil, fmt.Errorf("failed to marshal request: %w", err)
+    }
+    key := crypto.GenerateKey()
+    opts.MultipartParams = url.Values{"data": {crypto.Encode(input, key)}}
+
+    // Perform API call
+    var info *api.Base
+    resp, err = p.client().CallJSON(ctx, opts, request, &info)
+    if err != nil {
+        return
+    }
+
+    // Handle API errors
+    if !info.State {
+        return nil, fmt.Errorf("API Error: %s (%d)", info.Error, info.Errno)
+    } else if info.Data.EncodedData == "" {
+        return nil, errors.New("no data")
+    }
+
+    // Decode and unmarshal response
+    output, err := crypto.Decode(info.Data.EncodedData, key)
+    if err != nil {
+        return nil, fmt.Errorf("failed to decode data: %w", err)
+    }
+    if err := json.Unmarshal(output, response); err != nil {
+        return nil, fmt.Errorf("failed to json.Unmarshal %q", string(output))
+    }
+    return resp, nil
+}
+
 func (p *poolClient) Call(ctx context.Context, opts *rest.Opts) (resp *http.Response, err error) {
     return p.client().Call(ctx, opts)
 }
@@ -610,9 +650,35 @@ func checkUploadChunkSize(cs fs.SizeSuffix) error {
     if cs < minChunkSize {
         return fmt.Errorf("%s is less than %s", cs, minChunkSize)
     }
+    if cs > maxChunkSize {
+        return fmt.Errorf("%s is greater than %s", cs, maxChunkSize)
+    }
     return nil
 }
 
+func (f *Fs) setUploadChunkSize(cs fs.SizeSuffix) (old fs.SizeSuffix, err error) {
+    err = checkUploadChunkSize(cs)
+    if err == nil {
+        old, f.opt.ChunkSize = f.opt.ChunkSize, cs
+    }
+    return
+}
+
+func checkUploadCutoff(cs fs.SizeSuffix) error {
+    if cs > maxUploadCutoff {
+        return fmt.Errorf("%s is greater than %s", cs, maxUploadCutoff)
+    }
+    return nil
+}
+
+func (f *Fs) setUploadCutoff(cs fs.SizeSuffix) (old fs.SizeSuffix, err error) {
+    err = checkUploadCutoff(cs)
+    if err == nil {
+        old, f.opt.UploadCutoff = f.opt.UploadCutoff, cs
+    }
+    return
+}
+
 // newFs partially constructs Fs from the path
 //
 // It constructs a valid Fs but doesn't attempt to figure out whether
@@ -640,6 +706,10 @@ func newFs(ctx context.Context, name, path string, m configmap.Mapper) (*Fs, error) {
     if err != nil {
         return nil, fmt.Errorf("115: chunk size: %w", err)
     }
+    err = checkUploadCutoff(opt.UploadCutoff)
+    if err != nil {
+        return nil, fmt.Errorf("115: upload cutoff: %w", err)
+    }
 
     // mod - override rootID from path remote:{ID}
     if rootID, _, _ := parseRootID(path); rootID != "" {
@@ -658,7 +728,6 @@ func newFs(ctx context.Context, name, path string, m configmap.Mapper) (*Fs, error) {
         DuplicateFiles:          false, // duplicatefiles are only possible via web
         CanHaveEmptyDirectories: true,  // can have empty directories
         NoMultiThreading:        true,  // set if can't have multiplethreads on one download open
-        ServerSideAcrossConfigs: true,  // Can copy from shared FS (this is checked in Copy/Move/DirMove)
     }).Fill(ctx, f)
 
     // setting appVer
@@ -722,6 +791,9 @@ func NewFs(ctx context.Context, name, root string, m configmap.Mapper) (fs.Fs, error) {
         f.rootFolderID = "0" //根目录 = root directory
     }
 
+    // Can copy from shared FS (this is checked in Copy/Move/DirMove)
+    f.features.ServerSideAcrossConfigs = f.isShare
+
     // Set the root folder path if it is not on the absolute root
     if f.rootFolderID != "" && f.rootFolderID != "0" {
         f.rootFolder, err = f.getDirPath(ctx, f.rootFolderID)
@@ -1598,7 +1670,7 @@ func (o *Object) Open(ctx context.Context, options ...fs.OpenOption) (in io.ReadCloser, err error) {
return nil, fmt.Errorf("can't download: %w", err)
}
if o.durl.URL == "" {
return nil, errors.New("can't download: no url")
return nil, fserrors.NoRetryError(errors.New("can't download: no url"))
}
return o.open(ctx, options...)
}
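The new CallDATA helper wraps 115's encrypted endpoints: it marshals the request, encrypts it into the data form field, validates the api.Base envelope, then decrypts Data.EncodedData into the caller's response struct. A minimal caller sketch, assuming hypothetical request/response types and endpoint path (none of these names come from the real API):

    type sampleReq struct {
        Pickcode string `json:"pickcode"`
    }

    type sampleResp struct {
        URL string `json:"url"`
    }

    func fetchDownloadURL(ctx context.Context, p *poolClient) (string, error) {
        opts := rest.Opts{
            Method:  "POST",
            RootURL: "https://proapi.115.com", // illustrative base URL
            Path:    "/sample/endpoint",       // hypothetical
        }
        req := sampleReq{Pickcode: "abc123"}
        var out sampleResp
        // CallDATA encrypts req into the "data" form field and
        // decrypts the EncodedData payload into out.
        if _, err := p.CallDATA(ctx, &opts, &req, &out); err != nil {
            return "", err
        }
        return out.URL, nil
    }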
18 changes: 18 additions & 0 deletions backend/115/115_test.go
@@ -3,6 +3,7 @@ package _115
 import (
     "testing"
 
+    "github.com/rclone/rclone/fs"
     "github.com/rclone/rclone/fstest/fstests"
 )
 
@@ -12,5 +13,22 @@ func TestIntegration(t *testing.T) {
         RemoteName:      "Test115:",
         NilObject:       (*Object)(nil),
         SkipInvalidUTF8: true,
+        ChunkedUpload: fstests.ChunkedUploadConfig{
+            MinChunkSize: minChunkSize,
+            MaxChunkSize: maxChunkSize,
+        },
     })
 }
+
+func (f *Fs) SetUploadChunkSize(cs fs.SizeSuffix) (fs.SizeSuffix, error) {
+    return f.setUploadChunkSize(cs)
+}
+
+func (f *Fs) SetUploadCutoff(cs fs.SizeSuffix) (fs.SizeSuffix, error) {
+    return f.setUploadCutoff(cs)
+}
+
+var (
+    _ fstests.SetUploadChunkSizer = (*Fs)(nil)
+    _ fstests.SetUploadCutoffer   = (*Fs)(nil)
+)
19 changes: 17 additions & 2 deletions backend/115/api/types.go
@@ -278,6 +278,21 @@ type DownloadURL struct {
     Cookies []*http.Cookie
 }
 
+func (u *DownloadURL) UnmarshalJSON(data []byte) error {
+    if string(data) == "false" {
+        *u = DownloadURL{}
+        return nil
+    }
+
+    type Alias DownloadURL // Use type alias to avoid recursion
+    aux := Alias{}
+    if err := json.Unmarshal(data, &aux); err != nil {
+        return err
+    }
+    *u = DownloadURL(aux)
+    return nil
+}
+
 // expiry parses expiry from URL parameter t
 func (u *DownloadURL) expiry() time.Time {
     if p, err := url.Parse(u.URL); err == nil {
@@ -304,9 +319,9 @@ func (u *DownloadURL) expired() bool {
     return expiry.Round(0).Add(-expiryDelta).Before(time.Now())
 }
 
-// Valid reports whether u is non-nil, has an URL, and is not expired.
+// Valid reports whether u is non-nil and is not expired.
 func (u *DownloadURL) Valid() bool {
-    return u != nil && u.URL != "" && !u.expired()
+    return u != nil && !u.expired()
 }
 
 func (u *DownloadURL) Cookie() string {
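The custom UnmarshalJSON above exists because the API can return a bare false where a DownloadURL object is expected, and the type alias strips the method set so the inner json.Unmarshal cannot recurse back into the custom method. A self-contained sketch of the same pattern (the struct is a simplified stand-in for api.DownloadURL):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // DL is a simplified stand-in for api.DownloadURL.
    type DL struct {
        URL string `json:"url"`
    }

    func (u *DL) UnmarshalJSON(data []byte) error {
        if string(data) == "false" {
            *u = DL{} // API sent false: leave the value empty
            return nil
        }
        type alias DL // alias has no UnmarshalJSON, so no recursion
        aux := alias{}
        if err := json.Unmarshal(data, &aux); err != nil {
            return err
        }
        *u = DL(aux)
        return nil
    }

    func main() {
        var a, b DL
        fmt.Println(json.Unmarshal([]byte(`false`), &a), a.URL == "") // <nil> true
        fmt.Println(json.Unmarshal([]byte(`{"url":"https://example.com/f"}`), &b), b.URL)
    }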
Diffs for the remaining changed files are not shown.