Repository: 52funny/pikpakcli
Branch: master
Commit: fc6a7e2f2d46
Files: 66
Total size: 222.8 KB
Directory structure:
gitextract_u9bxj4ec/
├── .github/
│ └── workflows/
│ ├── dockerhub.yml
│ └── goreleaser.yml
├── .gitignore
├── .goreleaser.yaml
├── Dockerfile
├── LICENSE
├── README.md
├── README_zhCN.md
├── cli/
│ ├── del/
│ │ └── del.go
│ ├── download/
│ │ ├── download.go
│ │ ├── download_test.go
│ │ └── progress_test.go
│ ├── empty/
│ │ ├── empty.go
│ │ └── empty_test.go
│ ├── list/
│ │ └── list.go
│ ├── new/
│ │ ├── folder/
│ │ │ └── folder.go
│ │ ├── new.go
│ │ ├── sha/
│ │ │ └── sha.go
│ │ └── url/
│ │ └── url.go
│ ├── quota/
│ │ ├── quota.go
│ │ └── quota_test.go
│ ├── rename/
│ │ └── rename.go
│ ├── root.go
│ ├── rubbish/
│ │ ├── rubbish.go
│ │ └── rubbish_test.go
│ ├── share/
│ │ └── share.go
│ ├── shell.go
│ └── upload/
│ └── upload.go
├── conf/
│ └── config.go
├── config_example.yml
├── docs/
│ ├── command.md
│ ├── command_docker.md
│ ├── command_zhCN.md
│ ├── config.md
│ └── config_zhCN.md
├── go.mod
├── go.sum
├── internal/
│ ├── api/
│ │ ├── captcha_token.go
│ │ ├── constants.go
│ │ ├── download.go
│ │ ├── download_test.go
│ │ ├── file.go
│ │ ├── file_test.go
│ │ ├── folder.go
│ │ ├── glob.go
│ │ ├── glob_test.go
│ │ ├── pikpak.go
│ │ ├── quota.go
│ │ ├── quota_test.go
│ │ ├── refresh_token.go
│ │ ├── session.go
│ │ ├── sha.go
│ │ ├── upload.go
│ │ └── url.go
│ ├── logx/
│ │ └── logx.go
│ ├── shell/
│ │ ├── open.go
│ │ ├── shell.go
│ │ └── shell_test.go
│ └── utils/
│ ├── format.go
│ ├── format_test.go
│ ├── path.go
│ ├── path_test.go
│ └── sync.go
├── main.go
└── rules/
├── README.md
└── rubbish_rules.txt
================================================
FILE CONTENTS
================================================
================================================
FILE: .github/workflows/dockerhub.yml
================================================
name: Publish Docker image
on:
push:
tags:
- "v*"
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout source
uses: actions/checkout@v2
- name: Docker meta
id: docker_meta
uses: docker/metadata-action@v5
with:
images: 52funny/pikpakcli
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push Docker Hub
uses: docker/build-push-action@v6
with:
push: true
context: .
platforms: linux/amd64,linux/arm64
file: ./Dockerfile
tags: ${{ steps.docker_meta.outputs.tags }}
================================================
FILE: .github/workflows/goreleaser.yml
================================================
name: goreleaser
on:
push:
tags:
- "v*"
permissions:
contents: write
jobs:
goreleaser:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Go
uses: actions/setup-go@v5
- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v6
with:
distribution: goreleaser
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.TOKEN }}
================================================
FILE: .gitignore
================================================
.vscode
.pikpaksync.txt
config.yml
pikpakcli
dist
dist/
================================================
FILE: .goreleaser.yaml
================================================
# This is an example .goreleaser.yml file with some sensible defaults.
# Make sure to check the documentation at https://goreleaser.com
# The lines below are called `modelines`. See `:help modeline`
# Feel free to remove those if you don't want/need to use them.
# yaml-language-server: $schema=https://goreleaser.com/static/schema.json
# vim: set ts=2 sw=2 tw=0 fo=cnqoj
version: 2
before:
hooks:
# You may remove this if you don't use go modules.
- go mod tidy
# you may remove this if you don't need go generate
- go generate ./...
builds:
- env:
- CGO_ENABLED=0
goos:
- linux
- windows
- darwin
archives:
- format: tar.gz
# this name template makes the OS and Arch compatible with the results of `uname`.
name_template: >-
{{ .ProjectName }}_
{{- title .Os }}_
{{- if eq .Arch "amd64" }}x86_64
{{- else if eq .Arch "386" }}i386
{{- else }}{{ .Arch }}{{ end }}
{{- if .Arm }}v{{ .Arm }}{{ end }}
# use zip for windows archives
format_overrides:
- goos: windows
format: zip
files:
- config_example.yml
changelog:
sort: asc
filters:
exclude:
- "^docs:"
- "^test:"
================================================
FILE: Dockerfile
================================================
FROM golang:1.21-alpine AS builder
RUN apk add --no-cache git
WORKDIR /src
COPY go.mod go.sum ./
RUN go mod download
COPY . .
# Use the buildx-provided platform args so the multi-arch CI build
# (linux/amd64,linux/arm64 in dockerhub.yml) produces a binary matching the
# target image platform instead of a hard-coded amd64 one. Defaults keep
# plain `docker build` working outside buildx.
ARG TARGETOS
ARG TARGETARCH
RUN CGO_ENABLED=0 GOOS=${TARGETOS:-linux} GOARCH=${TARGETARCH:-amd64} \
    go build -ldflags "-s -w" -o /usr/local/bin/pikpakcli ./main.go
FROM alpine:3.18
# ca-certificates is required for TLS connections to the PikPak API.
RUN apk add --no-cache ca-certificates
COPY --from=builder /usr/local/bin/pikpakcli /usr/local/bin/pikpakcli
WORKDIR /root
ENTRYPOINT ["/usr/local/bin/pikpakcli"]
================================================
FILE: LICENSE
================================================
MIT License
Copyright (c) 2024 52funny
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
================================================
FILE: README.md
================================================
# PikPak CLI


English | [简体中文](https://github.com/52funny/pikpakcli/blob/master/README_zhCN.md)
PikPakCli is a command line tool for Pikpak Cloud.

## Installation
### Compiling from source code
To build the tool from the source code, ensure you have [Go](https://go.dev/doc/install) installed on your system.
Clone the project:
```bash
git clone https://github.com/52funny/pikpakcli
```
Build the project:
```bash
go build
```
Run the tool:
```
./pikpakcli
```
### Build with Docker
You can also run `pikpakcli` using Docker.
Pull the Docker image:
```bash
docker pull 52funny/pikpakcli:master
```
Run the tool:
```bash
docker run --rm 52funny/pikpakcli:master --help
```
### Download from Release
Download the executable file you need from the [Releases](https://github.com/52funny/pikpakcli/releases) page, then run it.
## Configuration
First, configure the `config_example.yml` file in the project, entering your account details.
If your account uses a phone number, it must be preceded by the country code, like `+861xxxxxxxxxx`.
Then, rename it to `config.yml`.
The configuration file will first be read from the current directory (`config.yml`). If it doesn't exist there, it will be read from the user's default configuration directory. The default root directories for each platform are:
- Linux: `$HOME/.config/pikpakcli`
- Darwin: `$HOME/Library/Application Support/pikpakcli`
- Windows: `%AppData%/pikpakcli`
The optional `open` section can override which local program is used by the interactive shell `open` builtin for different file categories.
> **For Docker Users:** You need to mount the configuration file into the Docker container. For example, if your `config.yml` is located at `/path/to/your/config.yml`, you can run the Docker container like this:
```bash
docker run -v /path/to/your/config.yml:/root/.config/pikpakcli/config.yml 52funny/pikpakcli:master ls
# if your config.yml is in the project directory, you can just run:
docker run -v $PWD/config.yml:/root/.config/pikpakcli/config.yml 52funny/pikpakcli:master ls
```
## Get started
After that you can run the `ls` command to see the files stored on **PikPak**.
```bash
./pikpakcli ls
```
## Usage
See [Command](docs/command.md) for more commands information.
## Contributors
================================================
FILE: README_zhCN.md
================================================
# PikPak CLI


PikPakCli 是 PikPak 的命令行工具。

## 安装方法
### 从源码编译
要从源代码构建该工具,请确保您的系统已安装 [Go](https://go.dev/doc/install) 环境。
克隆项目
```bash
git clone https://github.com/52funny/pikpakcli
```
生成可执行文件
```bash
go build
```
运行
```bash
./pikpakcli
```
### 从 Release 下载
从 Release 下载你所需要的版本,然后运行。
## 配置文件
首先将项目中的 `config_example.yml` 配置一下,输入自己的账号密码
如果账号是手机号,手机号要以区号开头。如 `+861xxxxxxxxxx`
然后将其重命名为 `config.yml`
配置文件将会优先从当前目录进行读取 `config.yml`,如果当前目录下不存在 `config.yml` 将会从用户的配置数据的默认根目录进行读取,各个平台的默认根目录如下:
- Linux: `$HOME/.config/pikpakcli`
- Darwin: `$HOME/Library/Application Support/pikpakcli`
- Windows: `%AppData%/pikpakcli`
可选的 `open` 配置段可以覆盖交互式 shell 中 `open` 内置命令针对不同文件类型使用的本地程序。
## 开始
之后你就可以运行 `ls` 指令来查看存储在 **PikPak** 上的文件了
```bash
./pikpakcli ls
```
## 用法
参阅 [Command](docs/command_zhCN.md) 查看更多的指令
## 贡献者
================================================
FILE: cli/del/del.go
================================================
package delete
import (
"fmt"
"strings"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/52funny/pikpakcli/internal/utils"
"github.com/spf13/cobra"
)
// path is the default remote parent directory, set via the -p/--path flag.
var path string

// DeleteCmd deletes one or more files or folders on the PikPak server.
// Targets may be bare names (resolved against --path) or slash-separated
// remote paths; glob patterns are expanded server-side first.
var DeleteCmd = &cobra.Command{
	Use:     "delete [file-or-folder ...]",
	Aliases: []string{"del", "rm"},
	Short:   "Delete files or folders on the PikPak server",
	Args:    cobra.MinimumNArgs(1),
	Run: func(cmd *cobra.Command, args []string) {
		p := api.NewPikPakWithContext(cmd.Context(), conf.Config.Username, conf.Config.Password)
		if err := p.Login(); err != nil {
			fmt.Println("Login failed")
			logx.Error(err)
			return
		}
		// Whether the user explicitly set -p; controls how bare names are grouped.
		flagPathSpecified := cmd.Flags().Changed("path")
		args, err := api.ExpandRemotePatterns(&p, path, args, flagPathSpecified)
		if err != nil {
			fmt.Println("Expand delete target failed")
			logx.Error(err)
			return
		}
		// Group by parent folder so each folder listing is fetched only once.
		for parentPath, names := range groupDeleteTargets(args, flagPathSpecified) {
			if err := deleteEntries(&p, parentPath, names); err != nil {
				fmt.Println("Delete entries failed")
				logx.Error(err)
			}
		}
	},
}
// init wires the delete command's flags.
func init() {
	flags := DeleteCmd.Flags()
	flags.StringVarP(&path, "path", "p", "/", "The path where to look for the file")
}
// groupDeleteTargets buckets the requested delete targets by their parent
// directory. Arguments containing a separator (or a leading slash) are split
// into parent+name via utils.SplitRemotePath; bare names fall back to the
// package-level path flag when forceParentPath is set.
func groupDeleteTargets(args []string, forceParentPath bool) map[string][]string {
	grouped := make(map[string][]string)
	for _, arg := range args {
		parent, name := path, arg
		if !forceParentPath || strings.HasPrefix(arg, "/") || strings.Contains(arg, "/") {
			dir, base := utils.SplitRemotePath(arg)
			// A target with no base name cannot be deleted; drop it.
			if base == "" {
				continue
			}
			name = base
			if dir == "" {
				parent = "/"
			} else {
				parent = dir
			}
		}
		grouped[parent] = append(grouped[parent], name)
	}
	return grouped
}
// deleteEntries deletes the named entries that live directly under
// parentPath. Names not present in the folder are reported and skipped;
// per-entry delete failures are logged but do not abort the remaining names.
func deleteEntries(p *api.PikPak, parentPath string, names []string) error {
	parentID, err := p.GetPathFolderId(parentPath)
	if err != nil {
		return fmt.Errorf("get path folder id for %s failed: %w", parentPath, err)
	}
	files, err := p.GetFolderFileStatList(parentID)
	if err != nil {
		return fmt.Errorf("get file list for %s failed: %w", parentPath, err)
	}
	// Index the folder listing by name for O(1) lookups below.
	fileIndex := make(map[string]api.FileStat, len(files))
	for _, file := range files {
		fileIndex[file.Name] = file
	}
	for _, name := range names {
		file, ok := fileIndex[name]
		if !ok {
			fmt.Printf("Entry not found in %s: %s\n", parentPath, name)
			continue
		}
		if err := p.DeleteFile(file.ID); err != nil {
			fmt.Printf("Delete %s from %s failed\n", name, parentPath)
			logx.Error(err)
			continue
		}
		fmt.Printf("Deleted %s from %s\n", name, parentPath)
	}
	return nil
}
================================================
FILE: cli/download/download.go
================================================
package download
import (
"fmt"
"os"
"path/filepath"
"strconv"
"strings"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/52funny/pikpakcli/internal/utils"
"github.com/spf13/cobra"
"github.com/vbauerster/mpb/v8"
"github.com/vbauerster/mpb/v8/decor"
)
// DownloadCmd downloads files or folders from the PikPak server. Targets
// are resolved against the --path base folder (or --parent-id); folders are
// downloaded recursively. With no arguments it prints usage.
var DownloadCmd = &cobra.Command{
	Use:     "download",
	Aliases: []string{"d"},
	Short:   `Download file from pikpak server`,
	Run: func(cmd *cobra.Command, args []string) {
		if len(args) == 0 {
			cmd.Help()
			return
		}
		p := api.NewPikPakWithContext(cmd.Context(), conf.Config.Username, conf.Config.Password)
		err := p.Login()
		if err != nil {
			fmt.Println("Login failed")
			logx.Error(err)
			return
		}
		// Expand server-side glob patterns into concrete targets.
		args, err = api.ExpandRemotePatterns(&p, folder, args, false)
		if err != nil {
			fmt.Println("Expand download target failed")
			logx.Error(err)
			return
		}
		handleDownload(cmd, &p, args)
	},
}
// Number of simultaneous downloads (-c/--count).
//
// default 1
var count int

// Base folder on the pikpak server (-p/--path) that relative targets are
// resolved against.
//
// default server root directory (.)
var folder string

// Parent folder id (-P/--parent-id); lets bare names skip path resolution.
var parentId string

// Local output directory (-o/--output).
//
// default current directory (.)
var output string

// Whether to render a progress bar (-g/--progress).
//
// default false
var progress bool

// warpFile pairs a fully-resolved remote file with the local directory it
// should be written into.
type warpFile struct {
	f      *api.File
	output string
}

// warpStat pairs a lightweight file stat (not yet resolved to a download
// URL) with its target local directory.
type warpStat struct {
	s      api.FileStat
	output string
}

// progressNameMaxRunes caps the rune width of names shown in the progress bar.
const progressNameMaxRunes = 36
// init registers the download command's flags.
func init() {
	DownloadCmd.Flags().IntVarP(&count, "count", "c", 1, "number of simultaneous downloads")
	DownloadCmd.Flags().StringVarP(&output, "output", "o", ".", "output directory")
	DownloadCmd.Flags().StringVarP(&folder, "path", "p", "/", "specific the base path on the pikpak server")
	DownloadCmd.Flags().StringVarP(&parentId, "parent-id", "P", "", "the parent path id")
	DownloadCmd.Flags().BoolVarP(&progress, "progress", "g", false, "show download progress")
}
// downloadTargetResolver abstracts the PikPak lookups needed to resolve a
// download target; it exists as a test seam for resolveDownloadTarget.
type downloadTargetResolver interface {
	GetFileByPath(path string) (api.FileStat, error)
	GetFileStat(parentId string, name string) (api.FileStat, error)
	GetPathFolderId(dirPath string) (string, error)
}
// handleDownload creates the local output directory and downloads each
// resolved target in turn. It refuses to proceed when multiple targets are
// mixed with "."/".." unless -o was given explicitly.
func handleDownload(cmd *cobra.Command, p *api.PikPak, args []string) {
	if err := utils.CreateDirIfNotExist(output); err != nil {
		fmt.Println("Create output directory failed")
		logx.Error(err)
		return
	}
	if requiresExplicitOutputFlag(cmd, args) {
		fmt.Println("Use -o to specify the output directory when downloading specific files")
		return
	}
	for _, arg := range args {
		downloadTarget(p, arg)
	}
}
// requiresExplicitOutputFlag reports whether the user must pass -o
// explicitly: several targets where any one is "." or ".." would otherwise
// make the destination ambiguous. An explicitly set --output, or a single
// target, never requires it.
func requiresExplicitOutputFlag(cmd *cobra.Command, args []string) bool {
	if cmd.Flags().Changed("output") {
		return false
	}
	if len(args) <= 1 {
		return false
	}
	for _, raw := range args {
		switch strings.TrimSpace(raw) {
		case ".", "..":
			return true
		}
	}
	return false
}
// downloadTarget resolves one CLI argument to a remote file or folder and
// downloads it: folders are walked recursively into a matching local
// subdirectory, plain files go straight into the output directory.
func downloadTarget(p *api.PikPak, arg string) {
	stat, err := resolveDownloadTarget(p, arg)
	if err != nil {
		target := remoteTargetPath(arg)
		fmt.Println("Resolve download target failed:", target)
		logx.Error(err)
		return
	}
	if stat.Kind == api.FileKindFolder {
		downloadFolder(p, stat.ID, localOutputRoot(stat.Name))
		return
	}
	downloadFiles(p, []warpFile{
		{
			f:      mustGetFile(p, stat),
			output: output,
		},
	})
}
// downloadFolder walks the remote folder tree rooted at folderID and
// downloads every file found, mirroring the hierarchy under rootOutput.
func downloadFolder(p *api.PikPak, folderID string, rootOutput string) {
	var stats []warpStat
	recursive(p, &stats, folderID, rootOutput)
	downloadStats(p, stats)
}
// downloadStats resolves each collected stat to a downloadable File (with 4
// concurrent metadata fetchers) and feeds the results to the download
// workers, blocking until every download has finished.
//
// NOTE(review): the early return on a CreateDirIfNotExist failure leaves
// the fetcher goroutines and download workers blocked on their channels;
// in this short-lived CLI the process exits soon after, but confirm this is
// acceptable.
func downloadStats(p *api.PikPak, collectStat []warpStat) {
	if len(collectStat) == 0 {
		return
	}
	// Buffered to len(collectStat) so the producer loop never blocks on send.
	statCh := make(chan warpStat, len(collectStat))
	statDone := make(chan struct{})
	fileCh := make(chan warpFile, len(collectStat))
	fileDone := make(chan struct{})
	// Four fetchers turn FileStats into Files (download-URL resolution).
	for i := 0; i < 4; i += 1 {
		go func(fileCh chan<- warpFile, statCh <-chan warpStat, statDone chan<- struct{}) {
			for {
				stat, ok := <-statCh
				if !ok {
					break
				}
				file, err := p.GetFile(stat.s.ID)
				if err != nil {
					fmt.Println("Get file failed")
					logx.Error(err)
				}
				// Even on error the (zero-value) file is forwarded so the
				// done-counting below stays balanced.
				fileCh <- warpFile{
					f:      &file,
					output: stat.output,
				}
				statDone <- struct{}{}
			}
		}(fileCh, statCh, statDone)
	}
	pb := startDownloadWorkers(fileCh, fileDone)
	for i := 0; i < len(collectStat); i += 1 {
		err := utils.CreateDirIfNotExist(collectStat[i].output)
		if err != nil {
			fmt.Println("Create output directory failed")
			logx.Error(err)
			return
		}
		statCh <- collectStat[i]
	}
	close(statCh)
	// Wait for all metadata fetches, then for all downloads.
	for i := 0; i < len(collectStat); i += 1 {
		<-statDone
	}
	close(statDone)
	for i := 0; i < len(collectStat); i += 1 {
		<-fileDone
	}
	if pb != nil {
		pb.Wait()
	}
}
// recursive walks the remote folder identified by parentId depth-first,
// appending every plain file to collectWarpFile together with the local
// directory (parentPath joined with remote sub-folder names) it belongs in.
// Listing errors are logged and terminate that branch of the walk.
func recursive(p *api.PikPak, collectWarpFile *[]warpStat, parentId string, parentPath string) {
	statList, err := p.GetFolderFileStatList(parentId)
	if err != nil {
		fmt.Println("Get folder file stat list failed")
		logx.Error(err)
		return
	}
	for _, r := range statList {
		if r.Kind == api.FileKindFolder {
			recursive(p, collectWarpFile, r.ID, filepath.Join(parentPath, r.Name))
		} else {
			*collectWarpFile = append(*collectWarpFile, warpStat{
				s:      r,
				output: parentPath,
			})
		}
	}
}
// downloadFiles downloads the given resolved files via the worker pool and
// blocks until all of them have been processed.
func downloadFiles(p *api.PikPak, files []warpFile) {
	jobs := make(chan warpFile, len(files))
	done := make(chan struct{}, len(files))
	pb := startDownloadWorkers(jobs, done)
	for _, wf := range files {
		jobs <- wf
	}
	close(jobs)
	// One completion signal arrives per file, success or not.
	for range files {
		<-done
	}
	close(done)
	if pb != nil {
		pb.Wait()
	}
}
// startDownloadWorkers launches `count` download workers reading from
// sendCh and signaling on receiveCh. The returned progress container is
// non-nil only when the --progress flag is set.
func startDownloadWorkers(sendCh <-chan warpFile, receiveCh chan<- struct{}) *mpb.Progress {
	var pb *mpb.Progress
	if progress {
		pb = mpb.New(mpb.WithWidth(30), mpb.WithAutoRefresh())
	}
	for worker := 0; worker < count; worker++ {
		go download(sendCh, receiveCh, pb)
	}
	return pb
}
// resolveDownloadTarget maps a CLI argument to the remote FileStat it
// denotes. Resolution order: an empty arg means the base folder itself
// (preferring --parent-id when set); a bare name under an explicit
// --parent-id is looked up directly in that parent; everything else is
// resolved as a path relative to the --path base folder.
func resolveDownloadTarget(p downloadTargetResolver, arg string) (api.FileStat, error) {
	if target := strings.TrimSpace(arg); target == "" {
		if parentId != "" {
			// Parent id known: synthesize the folder stat without a lookup.
			return api.FileStat{
				Kind: api.FileKindFolder,
				ID:   parentId,
				Name: filepath.Base(filepath.Clean(folder)),
			}, nil
		}
		remotePath := remoteTargetPath("")
		if remotePath == string(filepath.Separator) {
			// The root has no stat entry; resolve its folder id directly.
			id, err := p.GetPathFolderId(folder)
			if err != nil {
				return api.FileStat{}, err
			}
			return api.FileStat{
				Kind: api.FileKindFolder,
				ID:   id,
				Name: "",
			}, nil
		}
		return p.GetFileByPath(remotePath)
	}
	// A bare name under an explicit parent id can skip path resolution.
	if parentId != "" && !filepath.IsAbs(arg) && !strings.Contains(arg, string(filepath.Separator)) {
		return p.GetFileStat(parentId, arg)
	}
	return p.GetFileByPath(remoteTargetPath(arg))
}
// remoteTargetPath resolves arg against the --path base folder into a
// cleaned absolute remote path. Absolute args ignore the base; an empty arg
// means the base folder itself.
func remoteTargetPath(arg string) string {
	target := strings.TrimSpace(arg)
	if target == "" {
		target = "."
	}
	if filepath.IsAbs(target) {
		return filepath.Clean(target)
	}
	base := strings.TrimSpace(folder)
	root := string(filepath.Separator)
	return filepath.Clean(filepath.Join(root, base, target))
}
// localOutputRoot returns the local directory a remote folder named `name`
// should be mirrored into: a subdirectory of --output, or --output itself
// when the name carries no useful component.
func localOutputRoot(name string) string {
	blank := strings.TrimSpace(name) == ""
	if blank || name == string(filepath.Separator) || name == "." {
		return output
	}
	return filepath.Join(output, name)
}
// mustGetFile fetches full file metadata for stat. On failure it logs the
// error and falls back to a File built from the stat alone, so the caller
// always gets a usable value.
func mustGetFile(p *api.PikPak, stat api.FileStat) *api.File {
	file, err := p.GetFile(stat.ID)
	if err == nil {
		return &file
	}
	fmt.Println("Get file failed")
	logx.Error(err)
	return &api.File{FileStat: stat}
}
// progressDisplayName builds the label shown next to a progress bar: the
// file name prefixed by its immediate output directory (when meaningful),
// truncated to the progress-bar name budget.
func progressDisplayName(warp warpFile) string {
	display := warp.f.Name
	base := filepath.Base(filepath.Clean(warp.output))
	switch base {
	case ".", string(filepath.Separator), "":
		// No meaningful parent; keep the bare file name.
	default:
		display = filepath.Join(base, display)
	}
	return trimRunes(display, progressNameMaxRunes)
}
// trimRunes shortens value to at most max runes, rune-aware so multi-byte
// text is never cut mid-character. When truncation occurs and max leaves
// room, the result ends with "...". A non-positive max yields "" (the
// original code panicked on negative max via runes[:max]).
func trimRunes(value string, max int) string {
	if max <= 0 {
		return ""
	}
	runes := []rune(value)
	if len(runes) <= max {
		return value
	}
	if max <= 3 {
		// Too narrow for an ellipsis; hard-cut instead.
		return string(runes[:max])
	}
	return string(runes[:max-3]) + "..."
}
// download is a worker loop: it receives files from inCh, downloads each to
// its target directory, and signals completion (success or failure) on out
// exactly once per file. A ".pikpakclidownload" flag file marks in-progress
// downloads so files completed by a previous run are skipped on re-runs.
// When pb is non-nil a per-file progress bar is rendered instead of log lines.
func download(inCh <-chan warpFile, out chan<- struct{}, pb *mpb.Progress) {
	for {
		warp, ok := <-inCh
		if !ok {
			break
		}
		path := filepath.Join(warp.output, warp.f.Name)
		exist, err := utils.Exists(path)
		if err != nil {
			out <- struct{}{}
			continue
		}
		// The flag file exists only while a download is incomplete.
		flag := path + ".pikpakclidownload"
		hasFlag, err := utils.Exists(flag)
		if err != nil {
			out <- struct{}{}
			continue
		}
		// File present without a flag means a prior run finished it; skip.
		if exist && !hasFlag {
			out <- struct{}{}
			continue
		}
		err = utils.TouchFile(flag)
		if err != nil {
			out <- struct{}{}
			continue
		}
		// Remote size arrives as a decimal string; needed for the bar total.
		siz, err := strconv.ParseInt(warp.f.Size, 10, 64)
		if err != nil {
			out <- struct{}{}
			continue
		}
		var bar *mpb.Bar
		if pb != nil {
			bar = pb.AddBar(siz,
				mpb.PrependDecorators(
					decor.Name(progressDisplayName(warp), decor.WC{W: progressNameMaxRunes + 2, C: decor.DSyncWidth}),
					decor.CountersKibiByte("% .1f / % .1f", decor.WCSyncSpace),
					decor.Percentage(decor.WCSyncSpace),
				),
				mpb.AppendDecorators(
					decor.Name(" | ", decor.WCSyncSpace),
					decor.Name("ETA ", decor.WCSyncSpace),
					decor.EwmaETA(decor.ET_STYLE_GO, 30),
					decor.Name(" | ", decor.WCSyncSpace),
					decor.Name("SPD ", decor.WCSyncSpace),
					decor.EwmaSpeed(decor.SizeB1024(0), "% .2f", 60),
				),
			)
		}
		// start downloading
		err = warp.f.Download(path, bar)
		// if hasn't error then remove flag file
		if err == nil {
			if pb == nil {
				fmt.Println("Download", warp.f.Name, "Success")
			}
			os.Remove(flag)
			if bar != nil {
				bar.SetTotal(siz, true)
			}
		} else {
			if pb == nil {
				fmt.Println("Download failed:", warp.f.Name)
				logx.Error(err)
			}
			if bar != nil {
				bar.Abort(false)
			}
		}
		out <- struct{}{}
	}
}
================================================
FILE: cli/download/download_test.go
================================================
package download
import (
"errors"
"path/filepath"
"testing"
"github.com/52funny/pikpakcli/internal/api"
"github.com/spf13/cobra"
"github.com/stretchr/testify/require"
)
// fakeTargetResolver implements downloadTargetResolver with injectable
// function fields so each test can assert which lookup path is taken.
type fakeTargetResolver struct {
	getFileByPath func(path string) (api.FileStat, error)
	getFileStat   func(parentId string, name string) (api.FileStat, error)
	getPathFolder func(dirPath string) (string, error)
}

func (f fakeTargetResolver) GetFileByPath(path string) (api.FileStat, error) {
	return f.getFileByPath(path)
}

func (f fakeTargetResolver) GetFileStat(parentId string, name string) (api.FileStat, error) {
	return f.getFileStat(parentId, name)
}

func (f fakeTargetResolver) GetPathFolderId(dirPath string) (string, error) {
	return f.getPathFolder(dirPath)
}
// TestRemoteTargetPathJoinsBasePath checks that relative targets are joined
// under the base folder while absolute targets ignore it.
func TestRemoteTargetPathJoinsBasePath(t *testing.T) {
	// Save/restore the package-level flag variable mutated by this test.
	originalFolder := folder
	t.Cleanup(func() {
		folder = originalFolder
	})
	folder = "/Movies"
	require.Equal(t, filepath.Clean("/Movies/Kids/Peppa.mp4"), remoteTargetPath("Kids/Peppa.mp4"))
	require.Equal(t, filepath.Clean("/TV"), remoteTargetPath("/TV"))
}

// TestResolveDownloadTargetUsesParentIDForDirectChild checks that a bare
// name with --parent-id set resolves via GetFileStat, not path lookup.
func TestResolveDownloadTargetUsesParentIDForDirectChild(t *testing.T) {
	originalFolder := folder
	originalParentID := parentId
	t.Cleanup(func() {
		folder = originalFolder
		parentId = originalParentID
	})
	folder = "/Movies"
	parentId = "parent-123"
	resolver := fakeTargetResolver{
		getFileStat: func(gotParentID string, gotName string) (api.FileStat, error) {
			require.Equal(t, "parent-123", gotParentID)
			require.Equal(t, "Peppa.mp4", gotName)
			return api.FileStat{ID: "file-1", Name: "Peppa.mp4"}, nil
		},
		getFileByPath: func(path string) (api.FileStat, error) {
			return api.FileStat{}, errors.New("should not resolve by path")
		},
		getPathFolder: func(dirPath string) (string, error) {
			return "", errors.New("should not resolve folder id")
		},
	}
	stat, err := resolveDownloadTarget(resolver, "Peppa.mp4")
	require.NoError(t, err)
	require.Equal(t, "file-1", stat.ID)
}

// TestResolveDownloadTargetJoinsBasePathForNestedArg checks that an arg
// containing a separator bypasses --parent-id and resolves by full path.
func TestResolveDownloadTargetJoinsBasePathForNestedArg(t *testing.T) {
	originalFolder := folder
	originalParentID := parentId
	t.Cleanup(func() {
		folder = originalFolder
		parentId = originalParentID
	})
	folder = "/Movies"
	parentId = "parent-123"
	resolver := fakeTargetResolver{
		getFileStat: func(parentId string, name string) (api.FileStat, error) {
			return api.FileStat{}, errors.New("should not resolve direct child")
		},
		getFileByPath: func(path string) (api.FileStat, error) {
			require.Equal(t, filepath.Clean("/Movies/Kids/Peppa.mp4"), path)
			return api.FileStat{ID: "file-2", Name: "Peppa.mp4"}, nil
		},
		getPathFolder: func(dirPath string) (string, error) {
			return "", errors.New("should not resolve folder id")
		},
	}
	stat, err := resolveDownloadTarget(resolver, "Kids/Peppa.mp4")
	require.NoError(t, err)
	require.Equal(t, "file-2", stat.ID)
}

// TestResolveDownloadTargetWithoutArgsUsesBaseFolder checks that an empty
// arg with no --parent-id resolves the base folder itself by path.
func TestResolveDownloadTargetWithoutArgsUsesBaseFolder(t *testing.T) {
	originalFolder := folder
	originalParentID := parentId
	t.Cleanup(func() {
		folder = originalFolder
		parentId = originalParentID
	})
	folder = "/Movies"
	parentId = ""
	resolver := fakeTargetResolver{
		getFileByPath: func(path string) (api.FileStat, error) {
			require.Equal(t, filepath.Clean("/Movies"), path)
			return api.FileStat{Kind: api.FileKindFolder, ID: "folder-1", Name: "Movies"}, nil
		},
		getFileStat: func(parentId string, name string) (api.FileStat, error) {
			return api.FileStat{}, errors.New("should not resolve by parent id")
		},
		getPathFolder: func(dirPath string) (string, error) {
			return "", errors.New("should not resolve folder id")
		},
	}
	stat, err := resolveDownloadTarget(resolver, "")
	require.NoError(t, err)
	require.Equal(t, "folder-1", stat.ID)
	require.Equal(t, api.FileKindFolder, stat.Kind)
}

// TestRequiresExplicitOutputFlag covers the "." / ".." multi-target guard
// and its suppression once -o has been set explicitly.
func TestRequiresExplicitOutputFlag(t *testing.T) {
	cmd := &cobra.Command{}
	cmd.Flags().StringP("output", "o", ".", "")
	require.False(t, requiresExplicitOutputFlag(cmd, []string{"."}))
	require.True(t, requiresExplicitOutputFlag(cmd, []string{"file.txt", "."}))
	require.True(t, requiresExplicitOutputFlag(cmd, []string{"file.txt", ".."}))
	require.False(t, requiresExplicitOutputFlag(cmd, []string{"file.txt"}))
	require.NoError(t, cmd.Flags().Set("output", "."))
	require.False(t, requiresExplicitOutputFlag(cmd, []string{"file.txt", "."}))
}
================================================
FILE: cli/download/progress_test.go
================================================
package download
import (
"path/filepath"
"testing"
"github.com/52funny/pikpakcli/internal/api"
"github.com/stretchr/testify/require"
)
// TestTrimRunes verifies rune-aware truncation: an ASCII string that fits
// untouched, and a multi-byte string shortened with a trailing ellipsis.
func TestTrimRunes(t *testing.T) {
	require.Equal(t, "abcdef", trimRunes("abcdef", 6))
	require.Equal(t, "你好世...", trimRunes("你好世界欢迎你", 6))
}
// TestProgressDisplayNameIncludesParentDir verifies the progress label is
// prefixed with the immediate parent directory of the output path.
func TestProgressDisplayNameIncludesParentDir(t *testing.T) {
	warp := warpFile{
		f:      &api.File{FileStat: api.FileStat{Name: "Peppa.mp4"}},
		output: filepath.Join("Film", "Kids"),
	}
	// Build the expectation with filepath.Join so the test also passes on
	// Windows, where the separator is '\' rather than '/'.
	require.Equal(t, filepath.Join("Kids", "Peppa.mp4"), progressDisplayName(warp))
}
================================================
FILE: cli/empty/empty.go
================================================
package empty
import (
"context"
"errors"
"fmt"
"path/filepath"
"sync"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/spf13/cobra"
)
// Flag targets for the empty command.
var targetPath string // -p/--path: root folder to scan
var concurrency int   // -c/--concurrency: parallel folder listings
var deleteMode bool   // -d/--delete: delete empty folders instead of only listing

// emptyFolderProvider abstracts the PikPak operations the empty-folder walk
// needs; it exists as a test seam for handleEmptyFolders.
type emptyFolderProvider interface {
	GetPathFolderId(dirPath string) (string, error)
	GetFolderFileStatList(parentId string) ([]api.FileStat, error)
	DeleteFile(fileId string) error
}
// EmptyCmd recursively finds (and, with --delete, removes) empty folders
// under a given path on the PikPak server.
var EmptyCmd = &cobra.Command{
	Use:   "empty [path]",
	Short: "Recursively list empty folders on the PikPak server",
	Args:  cobra.MaximumNArgs(1),
	Run: func(cmd *cobra.Command, args []string) {
		// A positional argument overrides the -p flag.
		path := targetPath
		if len(args) > 0 {
			path = args[0]
		}
		p := api.NewPikPakWithContext(cmd.Context(), conf.Config.Username, conf.Config.Password)
		if err := p.Login(); err != nil {
			fmt.Println("Login failed")
			logx.Error(err)
			return
		}
		emptyFolders, err := handleEmptyFolders(cmd.Context(), &p, path, concurrency, deleteMode)
		if err != nil {
			// Cancellation (e.g. Ctrl-C) is reported quietly, not as a failure.
			if errors.Is(err, context.Canceled) {
				fmt.Println("Empty folder scan canceled")
				return
			}
			fmt.Println("Handle empty folders failed")
			logx.Error(err)
			return
		}
		if len(emptyFolders) == 0 {
			fmt.Printf("No empty folders found under %s\n", path)
			return
		}
		for _, folder := range emptyFolders {
			if deleteMode {
				fmt.Printf("Deleted empty folder: %s\n", folder)
				continue
			}
			fmt.Printf("Empty folder: %s\n", folder)
		}
	},
}
// init registers the empty command's flags.
func init() {
	EmptyCmd.Flags().StringVarP(&targetPath, "path", "p", "/", "The path where to remove empty folders recursively")
	EmptyCmd.Flags().IntVarP(&concurrency, "concurrency", "c", 8, "number of folders to process concurrently")
	EmptyCmd.Flags().BoolVarP(&deleteMode, "delete", "d", false, "delete the empty folders instead of only listing them")
}
// handleEmptyFolders walks the folder tree rooted at rootPath and returns
// the paths of all empty folders found (children before their parents within
// a branch). When deleteMode is true each empty folder is deleted as it is
// found. The root itself is only eligible when rootPath is not the remote
// root "/". A nil ctx falls back to context.Background(); cancellation is
// honored throughout the walk.
func handleEmptyFolders(ctx context.Context, p emptyFolderProvider, rootPath string, concurrency int, deleteMode bool) ([]string, error) {
	if ctx == nil {
		ctx = context.Background()
	}
	if err := ctx.Err(); err != nil {
		return nil, err
	}
	rootID, err := p.GetPathFolderId(rootPath)
	if err != nil {
		return nil, err
	}
	// Guard against non-positive concurrency values from the flag.
	if concurrency < 1 {
		concurrency = 1
	}
	deleted := make([]string, 0)
	state := emptyWalkState{
		sem: make(chan struct{}, concurrency),
	}
	if _, err := walkEmptyFolders(ctx, p, rootID, filepath.Clean(rootPath), filepath.Clean(rootPath) != string(filepath.Separator), deleteMode, &deleted, &state); err != nil {
		return nil, err
	}
	return deleted, nil
}
// emptyWalkState carries walk-wide shared state: sem bounds concurrent
// folder listings; mu guards appends to the shared result slice.
type emptyWalkState struct {
	sem chan struct{}
	mu  sync.Mutex
}

// emptyFolderResult reports one child folder's outcome back to its parent.
type emptyFolderResult struct {
	empty bool
	err   error
}
// walkEmptyFolders determines whether folderID is empty (no files and no
// non-empty sub-folders). Children are walked concurrently when a slot is
// free on state.sem, otherwise inline on the current goroutine — the inline
// fallback also prevents the recursion from deadlocking on its own
// semaphore. Folders found empty are appended to deleted (and removed
// remotely when deleteMode is set) unless allowDeleteCurrent is false,
// which protects the walk root. Returns whether this folder ended up empty.
func walkEmptyFolders(ctx context.Context, p emptyFolderProvider, folderID, currentPath string, allowDeleteCurrent bool, deleteMode bool, deleted *[]string, state *emptyWalkState) (bool, error) {
	if err := ctx.Err(); err != nil {
		return false, err
	}
	files, err := p.GetFolderFileStatList(folderID)
	if err != nil {
		return false, err
	}
	hasFiles := false
	hasRemainingFolders := false
	// Buffered so child goroutines never block on send even if we bail early.
	results := make(chan emptyFolderResult, len(files))
	var childFolders int
	for _, file := range files {
		if err := ctx.Err(); err != nil {
			return false, err
		}
		if file.Kind != api.FileKindFolder {
			hasFiles = true
			continue
		}
		childFolders++
		childPath := filepath.Join(currentPath, file.Name)
		select {
		case <-ctx.Done():
			return false, ctx.Err()
		case state.sem <- struct{}{}:
			// Semaphore slot acquired: walk this child concurrently.
			go func(file api.FileStat, childPath string) {
				defer func() {
					<-state.sem
				}()
				childEmpty, err := walkEmptyFolders(ctx, p, file.ID, childPath, true, deleteMode, deleted, state)
				results <- emptyFolderResult{
					empty: childEmpty,
					err:   err,
				}
			}(file, childPath)
		default:
			// No slot free: walk inline to keep making progress.
			childEmpty, err := walkEmptyFolders(ctx, p, file.ID, childPath, true, deleteMode, deleted, state)
			results <- emptyFolderResult{
				empty: childEmpty,
				err:   err,
			}
		}
	}
	// Collect one result per child folder, failing fast on error or cancel.
	for i := 0; i < childFolders; i++ {
		select {
		case <-ctx.Done():
			return false, ctx.Err()
		case result := <-results:
			if result.err != nil {
				return false, result.err
			}
			if !result.empty {
				hasRemainingFolders = true
			}
		}
	}
	isEmpty := !hasFiles && !hasRemainingFolders
	if !isEmpty {
		return false, nil
	}
	if !allowDeleteCurrent {
		return true, nil
	}
	if deleteMode {
		if err := ctx.Err(); err != nil {
			return false, err
		}
		if err := p.DeleteFile(folderID); err != nil {
			return false, err
		}
	}
	// Record under the mutex: multiple child walks may append concurrently.
	state.mu.Lock()
	*deleted = append(*deleted, currentPath)
	state.mu.Unlock()
	return true, nil
}
================================================
FILE: cli/empty/empty_test.go
================================================
package empty
import (
"context"
"errors"
"path/filepath"
"sync"
"testing"
"time"
"github.com/52funny/pikpakcli/internal/api"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// fakeEmptyFolderProvider is an in-memory folder tree implementing
// emptyFolderProvider; the mutex makes it safe for the concurrent walk.
type fakeEmptyFolderProvider struct {
	rootID       string
	pathToID     map[string]string
	folders      map[string][]api.FileStat
	deletedFiles []string
	mu           sync.Mutex
}

func (f *fakeEmptyFolderProvider) GetPathFolderId(dirPath string) (string, error) {
	if id, ok := f.pathToID[filepath.Clean(dirPath)]; ok {
		return id, nil
	}
	return "", errors.New("path not found")
}

// GetFolderFileStatList returns a copy so callers cannot race on the
// underlying slice while DeleteFile mutates it.
func (f *fakeEmptyFolderProvider) GetFolderFileStatList(parentId string) ([]api.FileStat, error) {
	f.mu.Lock()
	defer f.mu.Unlock()
	files := f.folders[parentId]
	cloned := make([]api.FileStat, len(files))
	copy(cloned, files)
	return cloned, nil
}

// DeleteFile records the deletion, removes the entry from every parent
// listing, and drops its own (empty) child listing.
func (f *fakeEmptyFolderProvider) DeleteFile(fileId string) error {
	f.mu.Lock()
	defer f.mu.Unlock()
	f.deletedFiles = append(f.deletedFiles, fileId)
	for parentID, files := range f.folders {
		// In-place filter reusing the backing array.
		filtered := files[:0]
		for _, file := range files {
			if file.ID != fileId {
				filtered = append(filtered, file)
			}
		}
		f.folders[parentID] = filtered
	}
	delete(f.folders, fileId)
	return nil
}
// TestHandleEmptyFoldersDeletesNestedEmptyFolders verifies that delete mode
// removes empty folders bottom-up (Kids before Movies) while leaving folders
// that still contain files (root holds video.mp4) untouched.
func TestHandleEmptyFoldersDeletesNestedEmptyFolders(t *testing.T) {
	provider := &fakeEmptyFolderProvider{
		pathToID: map[string]string{
			filepath.Clean("/"): "root",
		},
		folders: map[string][]api.FileStat{
			"root": {
				{ID: "movies", Name: "Movies", Kind: api.FileKindFolder},
				{ID: "music", Name: "Music", Kind: api.FileKindFolder},
				{ID: "video", Name: "video.mp4", Kind: api.FileKindFile},
			},
			"movies": {
				{ID: "kids", Name: "Kids", Kind: api.FileKindFolder},
			},
			"kids":  {},
			"music": {},
		},
	}
	deleted, err := handleEmptyFolders(context.Background(), provider, "/", 4, true)
	require.NoError(t, err)
	assert.ElementsMatch(t, []string{filepath.Clean("/Movies/Kids"), filepath.Clean("/Movies"), filepath.Clean("/Music")}, deleted)
	assert.ElementsMatch(t, []string{"kids", "movies", "music"}, provider.deletedFiles)
}

// TestHandleEmptyFoldersSkipsNonEmptyRootTarget verifies that a target folder
// containing a file is neither reported as empty nor deleted.
func TestHandleEmptyFoldersSkipsNonEmptyRootTarget(t *testing.T) {
	provider := &fakeEmptyFolderProvider{
		pathToID: map[string]string{
			filepath.Clean("/Movies"): "movies",
		},
		folders: map[string][]api.FileStat{
			"movies": {
				{ID: "episode", Name: "episode.mkv", Kind: api.FileKindFile},
			},
		},
	}
	deleted, err := handleEmptyFolders(context.Background(), provider, "/Movies", 4, true)
	require.NoError(t, err)
	assert.Empty(t, deleted)
	assert.Empty(t, provider.deletedFiles)
}

// TestHandleEmptyFoldersDeletesTargetWhenItBecomesEmpty verifies bottom-up
// deletion: removing Kids leaves Movies empty, so Movies is removed too.
// Equal (not ElementsMatch) pins the deletion order.
func TestHandleEmptyFoldersDeletesTargetWhenItBecomesEmpty(t *testing.T) {
	provider := &fakeEmptyFolderProvider{
		pathToID: map[string]string{
			filepath.Clean("/Movies"): "movies",
		},
		folders: map[string][]api.FileStat{
			"movies": {
				{ID: "kids", Name: "Kids", Kind: api.FileKindFolder},
			},
			"kids": {},
		},
	}
	deleted, err := handleEmptyFolders(context.Background(), provider, "/Movies", 4, true)
	require.NoError(t, err)
	assert.Equal(t, []string{filepath.Clean("/Movies/Kids"), filepath.Clean("/Movies")}, deleted)
	assert.Equal(t, []string{"kids", "movies"}, provider.deletedFiles)
}

// TestHandleEmptyFoldersNormalizesInvalidConcurrency verifies that a
// concurrency of 0 still completes the scan and delete successfully.
func TestHandleEmptyFoldersNormalizesInvalidConcurrency(t *testing.T) {
	provider := &fakeEmptyFolderProvider{
		pathToID: map[string]string{
			filepath.Clean("/Movies"): "movies",
		},
		folders: map[string][]api.FileStat{
			"movies": {},
		},
	}
	deleted, err := handleEmptyFolders(context.Background(), provider, "/Movies", 0, true)
	require.NoError(t, err)
	assert.Equal(t, []string{filepath.Clean("/Movies")}, deleted)
	assert.Equal(t, []string{"movies"}, provider.deletedFiles)
}

// TestHandleEmptyFoldersListsWithoutDeleting verifies list mode
// (deleteMode=false): empty folders are reported but DeleteFile is never
// called.
func TestHandleEmptyFoldersListsWithoutDeleting(t *testing.T) {
	provider := &fakeEmptyFolderProvider{
		pathToID: map[string]string{
			filepath.Clean("/"): "root",
		},
		folders: map[string][]api.FileStat{
			"root": {
				{ID: "movies", Name: "Movies", Kind: api.FileKindFolder},
			},
			"movies": {},
		},
	}
	emptyFolders, err := handleEmptyFolders(context.Background(), provider, "/", 4, false)
	require.NoError(t, err)
	assert.Equal(t, []string{filepath.Clean("/Movies")}, emptyFolders)
	assert.Empty(t, provider.deletedFiles)
}
// blockingEmptyFolderProvider wraps the fake provider but parks any listing
// of the "slow" folder until block is closed, letting tests hold a walker
// goroutine mid-flight.
type blockingEmptyFolderProvider struct {
	fakeEmptyFolderProvider
	block chan struct{}
}

// GetFolderFileStatList blocks on the "slow" folder, then delegates to the
// embedded fake.
func (f *blockingEmptyFolderProvider) GetFolderFileStatList(parentId string) ([]api.FileStat, error) {
	if parentId == "slow" {
		<-f.block
	}
	return f.fakeEmptyFolderProvider.GetFolderFileStatList(parentId)
}
// TestHandleEmptyFoldersHonorsCanceledContext verifies that an
// already-canceled context fails fast with context.Canceled and no results.
func TestHandleEmptyFoldersHonorsCanceledContext(t *testing.T) {
	ctx, cancel := context.WithCancel(context.Background())
	cancel()
	provider := &fakeEmptyFolderProvider{
		pathToID: map[string]string{
			filepath.Clean("/"): "root",
		},
		folders: map[string][]api.FileStat{
			"root": {},
		},
	}
	deleted, err := handleEmptyFolders(ctx, provider, "/", 4, false)
	require.ErrorIs(t, err, context.Canceled)
	assert.Nil(t, deleted)
}

// TestHandleEmptyFoldersStopsWaitingAfterCancel verifies that cancellation
// unblocks the walker even while a child listing ("slow") is stuck: the call
// must return context.Canceled within a second rather than waiting on the
// blocked goroutine. The channel is closed at the end to release it.
func TestHandleEmptyFoldersStopsWaitingAfterCancel(t *testing.T) {
	ctx, cancel := context.WithCancel(context.Background())
	provider := &blockingEmptyFolderProvider{
		fakeEmptyFolderProvider: fakeEmptyFolderProvider{
			pathToID: map[string]string{
				filepath.Clean("/"): "root",
			},
			folders: map[string][]api.FileStat{
				"root": {
					{ID: "slow", Name: "slow", Kind: api.FileKindFolder},
				},
				"slow": {},
			},
		},
		block: make(chan struct{}),
	}
	done := make(chan error, 1)
	go func() {
		_, err := handleEmptyFolders(ctx, provider, "/", 4, false)
		done <- err
	}()
	cancel()
	select {
	case err := <-done:
		require.ErrorIs(t, err, context.Canceled)
	case <-time.After(time.Second):
		t.Fatal("handleEmptyFolders did not stop promptly after cancellation")
	}
	// Unblock the parked goroutine so it does not leak past the test.
	close(provider.block)
}
================================================
FILE: cli/list/list.go
================================================
package list
import (
"fmt"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/52funny/pikpakcli/internal/utils"
"github.com/fatih/color"
"github.com/spf13/cobra"
)
// Flag storage for the ls command; bound to the flags registered in init.
var long bool       // -l: long listing format
var human bool      // -H: human readable sizes
var path string     // -p: remote path to list
var parentId string // -P: list by parent folder id instead of path

// ListCmd implements `pikpakcli ls`: it lists the files under a remote
// folder, identified either by path or directly by parent id.
var ListCmd = &cobra.Command{
	Use:   "ls",
	Short: `Get the directory information under the specified folder`,
	Run: func(cmd *cobra.Command, args []string) {
		p := api.NewPikPakWithContext(cmd.Context(), conf.Config.Username, conf.Config.Password)
		err := p.Login()
		if err != nil {
			fmt.Println("Login failed")
			logx.Error(err)
			return
		}
		// Re-read the flag values into locals (these shadow the package
		// vars) and hand them to handle explicitly.
		long, _ := cmd.Flags().GetBool("long")
		human, _ := cmd.Flags().GetBool("human")
		path, _ := cmd.Flags().GetString("path")
		parentId, _ := cmd.Flags().GetString("parent-id")
		handle(&p, args, long, human, path, parentId)
	},
}

// init registers the ls flags.
func init() {
	ListCmd.Flags().BoolVarP(&human, "human", "H", false, "display human readable format")
	ListCmd.Flags().BoolVarP(&long, "long", "l", false, "display long format")
	ListCmd.Flags().StringVarP(&path, "path", "p", "/", "display the specified path")
	ListCmd.Flags().StringVarP(&parentId, "parent-id", "P", "", "display the specified parent id")
}
// handle lists the files in the requested folder. A positional argument
// overrides the -p path, and a non-empty parentId skips path resolution.
func handle(p *api.PikPak, args []string, long, human bool, path, parentId string) {
	if len(args) > 0 {
		path = args[0]
	}
	if parentId == "" {
		id, err := p.GetPathFolderId(path)
		if err != nil {
			fmt.Println("Get path folder id error")
			logx.Error(err)
			return
		}
		parentId = id
	}
	files, err := p.GetFolderFileStatList(parentId)
	if err != nil {
		fmt.Println("Get folder file stat list error")
		logx.Error(err)
		return
	}
	// Pick the display mode once instead of per iteration.
	mode := 0
	if long {
		mode = 2
	}
	for i := range files {
		display(mode, &files[i])
	}
}
// display prints one file entry.
// mode 0: name only; mode 2: long format (id, size, creation time, name).
// Folder names are rendered in green in both modes.
func display(mode int, file *api.FileStat) {
	size := utils.FormatStorage(file.Size, human)
	name := file.Name
	if file.Kind == api.FileKindFolder {
		name = color.GreenString(name)
	}
	switch mode {
	case 0:
		fmt.Printf("%-20s\n", name)
	case 2:
		fmt.Printf("%-26s %-8s %-19s %s\n", file.ID, size, file.CreatedTime.Format("2006-01-02 15:04:05"), name)
	}
}
================================================
FILE: cli/new/folder/folder.go
================================================
package folder
import (
"fmt"
"path/filepath"
"strings"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/spf13/cobra"
)
// NewFolderCommand implements `pikpakcli new folder`: it creates one or more
// folders on the PikPak drive, one per positional argument.
var NewFolderCommand = &cobra.Command{
	Use:   "folder",
	Short: `Create a folder to pikpak server`,
	Run: func(cmd *cobra.Command, args []string) {
		p := api.NewPikPakWithContext(cmd.Context(), conf.Config.Username, conf.Config.Password)
		err := p.Login()
		if err != nil {
			fmt.Println("Login failed")
			logx.Error(err)
			return
		}
		if len(args) > 0 {
			handleNewFolder(&p, args)
		} else {
			fmt.Println("Please input the folder name")
		}
	},
}

// path (-p) is the remote parent path; parentId (-P) overrides it with an id.
var path string
var parentId string

func init() {
	NewFolderCommand.Flags().StringVarP(&path, "path", "p", "/", "The path of the folder")
	NewFolderCommand.Flags().StringVarP(&parentId, "parent-id", "P", "", "The parent id")
}
// handleNewFolder creates each named folder under the configured parent.
// Relative folder paths are created beneath the -p/-P parent; absolute paths
// are resolved from the drive root (empty parent id).
func handleNewFolder(p *api.PikPak, folders []string) {
	baseParentID := parentId
	if baseParentID == "" {
		id, err := p.GetPathFolderId(path)
		if err != nil {
			fmt.Println("Get parent id failed")
			logx.Error(err)
			return
		}
		baseParentID = id
	}
	for _, raw := range folders {
		name := strings.TrimSpace(raw)
		if name == "" {
			fmt.Println("Folder name cannot be empty")
			continue
		}
		clean := filepath.Clean(name)
		if clean == "." || clean == string(filepath.Separator) {
			fmt.Printf("Folder path is invalid: %s\n", name)
			continue
		}
		parent := baseParentID
		if filepath.IsAbs(clean) {
			// Absolute paths start from the drive root.
			parent = ""
		}
		if _, err := p.GetDeepFolderOrCreateId(parent, clean); err != nil {
			fmt.Printf("Create folder %s failed\n", name)
			logx.Error(err)
		} else {
			fmt.Printf("Create folder %s success\n", name)
		}
	}
}
================================================
FILE: cli/new/new.go
================================================
package new
import (
"github.com/52funny/pikpakcli/cli/new/folder"
"github.com/52funny/pikpakcli/cli/new/sha"
"github.com/52funny/pikpakcli/cli/new/url"
"github.com/spf13/cobra"
)
// NewCommand groups the creation subcommands (folder, sha, url); running it
// with no subcommand just prints help.
var NewCommand = &cobra.Command{
	Use:     "new",
	Aliases: []string{"n"},
	Short:   `New can do something like create folder or other things`,
	Run: func(cmd *cobra.Command, args []string) {
		cmd.Help()
	},
}

// init wires the subcommands onto `new`.
func init() {
	NewCommand.AddCommand(folder.NewFolderCommand)
	NewCommand.AddCommand(sha.NewShaCommand)
	NewCommand.AddCommand(url.NewUrlCommand)
}
================================================
FILE: cli/new/sha/sha.go
================================================
package sha
import (
"bufio"
"fmt"
"io"
"os"
"strings"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/spf13/cobra"
)
// NewShaCommand implements `pikpakcli new sha`: it creates remote files from
// sha descriptors given either as positional arguments or, with -i, one per
// line in a local file.
var NewShaCommand = &cobra.Command{
	Use:   "sha",
	Short: `Create a file according to sha`,
	Run: func(cmd *cobra.Command, args []string) {
		p := api.NewPikPakWithContext(cmd.Context(), conf.Config.Username, conf.Config.Password)
		err := p.Login()
		if err != nil {
			fmt.Println("Login failed")
			logx.Error(err)
			return
		}
		// input mode: read one sha link per line from the -i file
		if strings.TrimSpace(input) != "" {
			f, err := os.OpenFile(input, os.O_RDONLY, 0666)
			if err != nil {
				fmt.Printf("Open file %s failed\n", input)
				logx.Error(err)
				return
			}
			// Close the file; the previous version leaked the handle.
			defer f.Close()
			reader := bufio.NewReader(f)
			shas := make([]string, 0)
			for {
				lineBytes, _, err := reader.ReadLine()
				if err != nil {
					// Break on EOF and on real read errors alike; breaking
					// only on io.EOF made a persistent read error loop
					// forever.
					if err != io.EOF {
						logx.Error(err)
					}
					break
				}
				shas = append(shas, string(lineBytes))
			}
			handleNewSha(&p, shas)
			return
		}
		// args mode: each positional argument is a sha link
		if len(args) > 0 {
			handleNewSha(&p, args)
		} else {
			fmt.Println("Please input the folder name")
		}
	},
}
// path (-p) is the remote parent path; parentId (-P) overrides it with an
// id; input (-i) points at a file holding one sha link per line.
var path string
var parentId string
var input string

func init() {
	NewShaCommand.Flags().StringVarP(&path, "path", "p", "/", "The path of the folder")
	NewShaCommand.Flags().StringVarP(&input, "input", "i", "", "The input of the sha file")
	NewShaCommand.Flags().StringVarP(&parentId, "parent-id", "P", "", "The parent id")
}
// handleNewSha creates remote files from "name|size|sha" descriptors,
// optionally prefixed with a scheme such as "PikPak://".
func handleNewSha(p *api.PikPak, shas []string) {
	var err error
	if parentId == "" {
		parentId, err = p.GetPathFolderId(path)
		if err != nil {
			fmt.Println("Get parent id failed")
			logx.Error(err)
			return
		}
	}
	for _, sha := range shas {
		// Strip an optional "scheme://" prefix. The previous code sliced
		// unconditionally, which chopped two characters off descriptors
		// without a scheme (strings.Index returns -1 when absent).
		if idx := strings.Index(sha, "://"); idx >= 0 {
			sha = sha[idx+3:]
		}
		shaElements := strings.Split(sha, "|")
		if len(shaElements) != 3 {
			fmt.Println("The sha format is wrong:", sha)
			continue
		}
		name, size, hash := shaElements[0], shaElements[1], shaElements[2]
		err := p.CreateShaFile(parentId, name, size, hash)
		if err != nil {
			fmt.Println("Create sha file failed")
			logx.Error(err)
			continue
		}
		fmt.Println("Create sha file success:", name)
	}
}
================================================
FILE: cli/new/url/url.go
================================================
package url
import (
"bufio"
"fmt"
"io"
"os"
"strings"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/spf13/cobra"
)
// NewUrlCommand implements `pikpakcli new url`: it creates cloud-download
// files from urls given as arguments, from an input file (-i), or from an
// interactive prompt (-c).
var NewUrlCommand = &cobra.Command{
	Use:   "url",
	Short: `Create a file according to url`,
	Run: func(cmd *cobra.Command, args []string) {
		p := api.NewPikPakWithContext(cmd.Context(), conf.Config.Username, conf.Config.Password)
		err := p.Login()
		if err != nil {
			fmt.Println("Login failed")
			logx.Error(err)
			return
		}
		if cli {
			handleCli(&p)
			return
		}
		// input mode: read one url per line from the -i file
		if strings.TrimSpace(input) != "" {
			f, err := os.OpenFile(input, os.O_RDONLY, 0666)
			if err != nil {
				fmt.Printf("Open file %s failed\n", input)
				logx.Error(err)
				return
			}
			// Close the file; the previous version leaked the handle.
			defer f.Close()
			reader := bufio.NewReader(f)
			urls := make([]string, 0)
			for {
				lineBytes, _, err := reader.ReadLine()
				if err != nil {
					// Break on EOF and on real read errors alike; breaking
					// only on io.EOF made a persistent read error loop
					// forever.
					if err != io.EOF {
						logx.Error(err)
					}
					break
				}
				urls = append(urls, string(lineBytes))
			}
			handleNewUrl(&p, urls)
			return
		}
		// args mode: each positional argument is a url
		if len(args) > 0 {
			handleNewUrl(&p, args)
		} else {
			fmt.Println("Please input the folder name")
		}
	},
}
// path (-p) is the remote parent path; parentId (-P) overrides it with an
// id; input (-i) names a file with one url per line; cli (-c) starts the
// interactive prompt instead.
var path string
var parentId string
var input string
var cli bool

func init() {
	NewUrlCommand.Flags().StringVarP(&path, "path", "p", "/", "The path of the folder")
	NewUrlCommand.Flags().StringVarP(&parentId, "parent-id", "P", "", "The parent id")
	NewUrlCommand.Flags().StringVarP(&input, "input", "i", "", "The input of the sha file")
	NewUrlCommand.Flags().BoolVarP(&cli, "cli", "c", false, "The cli mode")
}
// handleNewUrl submits each url as a new cloud-download file under the
// configured parent folder, reporting per-url success or failure.
func handleNewUrl(p *api.PikPak, shas []string) {
	if parentId == "" {
		id, err := p.GetPathFolderId(path)
		if err != nil {
			fmt.Println("Get parent id failed")
			logx.Error(err)
			return
		}
		parentId = id
	}
	for _, url := range shas {
		if err := p.CreateUrlFile(parentId, url); err != nil {
			fmt.Println("Create url file failed")
			logx.Error(err)
			continue
		}
		fmt.Println("Create url file success:", url)
	}
}
// handleCli runs an interactive prompt, creating one url file per line read
// from stdin, until EOF or a read error ends the session.
func handleCli(p *api.PikPak) {
	var err error
	if parentId == "" {
		parentId, err = p.GetPathFolderId(path)
		if err != nil {
			fmt.Println("Get parent id failed")
			logx.Error(err)
			return
		}
	}
	reader := bufio.NewReader(os.Stdin)
	for {
		fmt.Print("> ")
		lineBytes, _, err := reader.ReadLine()
		if err != nil {
			// Break on EOF and on real read errors alike; breaking only on
			// io.EOF made a persistent stdin error loop forever, reprinting
			// the prompt.
			if err != io.EOF {
				logx.Error(err)
			}
			break
		}
		url := string(lineBytes)
		err = p.CreateUrlFile(parentId, url)
		if err != nil {
			fmt.Println("Create url file failed")
			logx.Error(err)
			continue
		}
		fmt.Println("Create url file success:", url)
	}
}
================================================
FILE: cli/quota/quota.go
================================================
package quota
import (
"fmt"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/52funny/pikpakcli/internal/utils"
"github.com/spf13/cobra"
)
// human toggles human-readable sizes for all quota output (-H).
var human bool

// QuotaCmd implements `pikpakcli quota`: it prints storage usage, the
// cloud-download quota, and the monthly transfer quotas.
var QuotaCmd = &cobra.Command{
	Use:   "quota",
	Short: `Get the quota for the pikpak cloud`,
	Run: func(cmd *cobra.Command, args []string) {
		p := api.NewPikPakWithContext(cmd.Context(), conf.Config.Username, conf.Config.Password)
		err := p.Login()
		if err != nil {
			fmt.Println("Login failed")
			logx.Error(err)
			return
		}
		q, err := p.GetQuota()
		if err != nil {
			fmt.Println("Get cloud quota error")
			logx.Error(err)
			return
		}
		fmt.Println("Storage:")
		fmt.Printf("%-20s%-20s\n", "total", "used")
		if human {
			fmt.Printf("%-20s%-20s\n", utils.FormatStorage(q.Quota.Limit, true), utils.FormatStorage(q.Quota.Usage, true))
		} else {
			fmt.Printf("%-20s%-20s\n", q.Quota.Limit, q.Quota.Usage)
		}
		displayCloudDownload(q.Quotas.CloudDownload)
		transfer, err := p.GetTransferQuota()
		if err != nil {
			fmt.Println("Get transfer quota error")
			logx.Error(err)
			return
		}
		displayMonthlyTransferQuota(transfer.Base)
	},
}

func init() {
	QuotaCmd.Flags().BoolVarP(&human, "human", "H", false, "display human readable format")
}
// displayCloudDownload prints the cloud-download quota table; the remaining
// column falls back to "N/A" when Remaining() cannot be computed.
func displayCloudDownload(cloudDownload api.Quota) {
	fmt.Printf("\ncloud download:\n")
	fmt.Printf("%-20s%-20s%-20s\n", "total", "used", "remaining")
	limit := formatQuotaValue(cloudDownload.Limit)
	usage := formatQuotaValue(cloudDownload.Usage)
	remaining, err := cloudDownload.Remaining()
	if err != nil {
		fmt.Printf("%-20s%-20s%-20s\n", limit, usage, "N/A")
		return
	}
	fmt.Printf("%-20s%-20s%-20s\n", limit, usage, formatTransferValue(remaining))
}
// displayMonthlyTransferQuota prints one row per monthly transfer bucket.
func displayMonthlyTransferQuota(base api.TransferQuotaBase) {
	fmt.Printf("\nmonthly transfer:\n")
	fmt.Printf("%-20s%-20s%-20s%-20s\n", "type", "total", "used", "remaining")
	rows := []struct {
		name  string
		quota api.TransferQuota
	}{
		{"cloud download", base.Offline},
		{"download", base.Download},
		{"upload", base.Upload},
	}
	for _, row := range rows {
		displayTransferRow(row.name, row.quota)
	}
}
// displayTransferRow prints a single row of the monthly transfer table.
func displayTransferRow(name string, quota api.TransferQuota) {
	total := formatTransferValue(quota.TotalAssets)
	used := formatTransferValue(quota.Assets)
	remaining := formatTransferValue(quota.Remaining())
	fmt.Printf("%-20s%-20s%-20s%-20s\n", name, total, used, remaining)
}
// formatTransferValue renders an int64 byte count, honoring the package
// -H (human) flag.
func formatTransferValue(size int64) string {
	return utils.FormatStorage(fmt.Sprintf("%d", size), human)
}

// formatQuotaValue renders a size already given as a decimal string.
func formatQuotaValue(size string) string {
	return utils.FormatStorage(size, human)
}
================================================
FILE: cli/quota/quota_test.go
================================================
package quota
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestFormatTransferValue checks both raw and human-readable rendering. The
// package-level human flag is saved and restored so this test no longer
// leaks human=true into other tests in the package.
func TestFormatTransferValue(t *testing.T) {
	orig := human
	t.Cleanup(func() { human = orig })
	human = false
	assert.Equal(t, "2048", formatTransferValue(2048))
	human = true
	assert.Equal(t, "2KB", formatTransferValue(2048))
}
================================================
FILE: cli/rename/rename.go
================================================
package rename
import (
"fmt"
"path/filepath"
"strings"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/spf13/cobra"
)
// RenameCmd implements `pikpakcli rename <old-path> <new-name>`. The old
// path may be a glob pattern, but it must expand to exactly one remote
// entry; the new name is a bare name with no path separators.
// Note: API-level failures print and return nil (exit 0), while argument
// validation failures return an error.
var RenameCmd = &cobra.Command{
	Use:   "rename ",
	Short: "Rename a file or folder on the PikPak drive",
	Long: `Rename a file or folder on the PikPak drive.
Example: pikpakcli rename /my-folder/old-name.txt new-name.txt`,
	Args: cobra.ExactArgs(2),
	RunE: func(cmd *cobra.Command, args []string) error {
		p := api.NewPikPakWithContext(cmd.Context(), conf.Config.Username, conf.Config.Password)
		if err := p.Login(); err != nil {
			fmt.Println("Login failed")
			logx.Error(err)
			return nil
		}
		oldPath := args[0]
		newName := strings.TrimSpace(args[1])
		if newName == "" {
			return fmt.Errorf("new name cannot be empty")
		}
		// Rename only changes the leaf name; reject anything path-like.
		if filepath.Base(newName) != newName {
			return fmt.Errorf("new name must not contain path separators")
		}
		// Expand globs; the target must be unambiguous.
		expandedPaths, err := api.ExpandRemotePatterns(&p, "/", []string{oldPath}, false)
		if err != nil {
			fmt.Printf("Could not find file or folder at path '%s'\n", oldPath)
			logx.Error(err)
			return nil
		}
		if len(expandedPaths) != 1 {
			return fmt.Errorf("rename target must match exactly one path")
		}
		oldPath = expandedPaths[0]
		fileStat, err := p.GetFileByPath(oldPath)
		if err != nil {
			fmt.Printf("Could not find file or folder at path '%s'\n", oldPath)
			logx.Error(err)
			return nil
		}
		if err := p.Rename(fileStat.ID, newName); err != nil {
			fmt.Printf("Failed to rename %s\n", oldPath)
			logx.Error(err)
			return nil
		}
		fmt.Printf("Successfully renamed '%s' to '%s'\n", oldPath, newName)
		return nil
	},
}
================================================
FILE: cli/root.go
================================================
package cli
import (
"fmt"
"os"
del "github.com/52funny/pikpakcli/cli/del"
"github.com/52funny/pikpakcli/cli/download"
"github.com/52funny/pikpakcli/cli/empty"
"github.com/52funny/pikpakcli/cli/list"
"github.com/52funny/pikpakcli/cli/new"
"github.com/52funny/pikpakcli/cli/quota"
"github.com/52funny/pikpakcli/cli/rename"
"github.com/52funny/pikpakcli/cli/rubbish"
"github.com/52funny/pikpakcli/cli/share"
"github.com/52funny/pikpakcli/cli/upload"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/spf13/cobra"
)
// rootCmd is the top-level pikpakcli command; running it bare prints help.
var rootCmd = &cobra.Command{
	Use:   "pikpakcli",
	Short: "Pikpakcli is a command line interface for Pikpak",
	Run: func(cmd *cobra.Command, args []string) {
		cmd.Help()
	},
	// PersistentPreRun loads the config file and initializes logging before
	// any subcommand runs; a broken config aborts the whole process.
	PersistentPreRun: func(cmd *cobra.Command, args []string) {
		err := conf.InitConfig(configPath)
		if err != nil {
			fmt.Println("Init config failed")
			logx.Error(err)
			os.Exit(1)
		}
		logx.Init(debug, debugTopics)
	},
}

// Config path (--config); defaults to ./config.yml.
var configPath string

// Debug mode (--debug) and optional per-topic filters (--debug-topic).
var debug bool
var debugTopics []string

// Initialize the command line interface: global flags plus all subcommands.
func init() {
	rootCmd.PersistentFlags().BoolVar(&debug, "debug", false, "debug mode")
	rootCmd.PersistentFlags().StringSliceVar(&debugTopics, "debug-topic", nil, "enable debug topics: api,session,transfer")
	rootCmd.PersistentFlags().StringVar(&configPath, "config", "config.yml", "config file path")
	rootCmd.AddCommand(upload.UploadCmd)
	rootCmd.AddCommand(download.DownloadCmd)
	rootCmd.AddCommand(share.ShareCommand)
	rootCmd.AddCommand(new.NewCommand)
	rootCmd.AddCommand(quota.QuotaCmd)
	rootCmd.AddCommand(list.ListCmd)
	rootCmd.AddCommand(del.DeleteCmd)
	rootCmd.AddCommand(empty.EmptyCmd)
	rootCmd.AddCommand(rubbish.RubbishCmd)
	rootCmd.AddCommand(rename.RenameCmd)
	rootCmd.AddCommand(shellCmd)
}
// Execute runs the root command and exits non-zero on failure; cobra has
// already printed the error by the time Execute returns.
func Execute() {
	if rootCmd.Execute() != nil {
		os.Exit(1)
	}
}
================================================
FILE: cli/rubbish/rubbish.go
================================================
package rubbish
import (
	"bufio"
	"context"
	"errors"
	"fmt"
	"io"
	"net/http"
	"os"
	"os/exec"
	"path/filepath"
	"runtime"
	"strings"
	"sync"
	"time"

	"github.com/52funny/pikpakcli/conf"
	"github.com/52funny/pikpakcli/internal/api"
	"github.com/52funny/pikpakcli/internal/logx"
	"github.com/52funny/pikpakcli/internal/utils"
	"github.com/spf13/cobra"
)
// Flag storage for the rubbish command; bound in init below.
var rubbishPath string     // -p: root folder to scan
var rulesPath string       // --rules: path or URL of the rules file
var rubbishConcurrency int // -c: max folders walked concurrently
var rubbishDeleteMode bool // -d: delete matches instead of only listing them
var openRulesFile bool     // --open-rules: open the rules file and exit
var openRulesDir bool      // --open-rules-dir: open the rules dir and exit
var downloadRules bool     // --download-rules: refresh rules from GitHub first

const (
	// defaultRulesRelativePath is where the rules live under the config dir.
	defaultRulesRelativePath = "rules/rubbish_rules.txt"
	// defaultRulesDownloadURL is the canonical upstream copy of the rules.
	defaultRulesDownloadURL = "https://raw.githubusercontent.com/52funny/pikpakcli/master/rules/rubbish_rules.txt"
)

// rubbishProvider is the minimal PikPak API surface the scanner needs; it
// exists so tests can substitute an in-memory fake.
type rubbishProvider interface {
	GetPathFolderId(dirPath string) (string, error)
	GetFolderFileStatList(parentId string) ([]api.FileStat, error)
	DeleteFile(fileId string) error
}

// compiledRules holds the parsed rule lists; exclude rules beat includes.
type compiledRules struct {
	includes []string
	excludes []string
}

// rubbishMatch pairs a matched remote path with the rule that matched it.
type rubbishMatch struct {
	path    string
	pattern string
}

// rubbishWalkState is shared by all concurrent walkers: sem bounds the
// number of in-flight folder walks, mu guards the shared matches slice.
type rubbishWalkState struct {
	sem chan struct{}
	mu  sync.Mutex
}

// rubbishFolderResult is what a child-folder walker reports back.
type rubbishFolderResult struct {
	matches []rubbishMatch
	err     error
}
// RubbishCmd implements `pikpakcli rubbish`: it recursively scans a remote
// folder for entries matching the rules file, listing or (with -d) deleting
// each match. The --open-rules*/--download-rules flags manage the local
// rules file; the open variants short-circuit before any scan.
var RubbishCmd = &cobra.Command{
	Use:   "rubbish [path]",
	Short: "Recursively find rubbish files on the PikPak server using text rules",
	Args:  cobra.MaximumNArgs(1),
	Run: func(cmd *cobra.Command, args []string) {
		// A positional argument overrides the -p path.
		path := rubbishPath
		if len(args) > 0 {
			path = args[0]
		}
		resolvedRulesPath, err := resolveRulesPath(rulesPath)
		if err != nil {
			fmt.Printf("Resolve rubbish rules failed: %v\n", err)
			return
		}
		if downloadRules {
			if err := downloadDefaultRules(resolvedRulesPath, defaultRulesDownloadURL); err != nil {
				fmt.Printf("Download rubbish rules failed: %v\n", err)
				return
			}
			fmt.Printf("Downloaded rubbish rules to %s\n", resolvedRulesPath)
		}
		// --open-rules-dir / --open-rules open the rules location and exit
		// without scanning, creating the default file first if needed.
		if openRulesDir {
			if err := ensureDefaultRulesFile(resolvedRulesPath); err != nil {
				fmt.Printf("Prepare rubbish rules failed: %v\n", err)
				return
			}
			if err := openLocalPath(filepath.Dir(resolvedRulesPath)); err != nil {
				fmt.Printf("Open rules directory failed: %v\n", err)
				return
			}
			fmt.Printf("Opened rules directory: %s\n", filepath.Dir(resolvedRulesPath))
			return
		}
		if openRulesFile {
			if err := ensureDefaultRulesFile(resolvedRulesPath); err != nil {
				fmt.Printf("Prepare rubbish rules failed: %v\n", err)
				return
			}
			if err := openLocalPath(resolvedRulesPath); err != nil {
				fmt.Printf("Open rules file failed: %v\n", err)
				return
			}
			fmt.Printf("Opened rules file: %s\n", resolvedRulesPath)
			return
		}
		// With no explicit --rules, make sure the default file exists.
		if strings.TrimSpace(rulesPath) == "" {
			if err := ensureDefaultRulesFile(resolvedRulesPath); err != nil {
				fmt.Printf("Prepare rubbish rules failed: %v\n", err)
				return
			}
		}
		rules, err := loadRules(resolvedRulesPath)
		if err != nil {
			fmt.Printf("Load rubbish rules failed: %v\n", err)
			return
		}
		p := api.NewPikPakWithContext(cmd.Context(), conf.Config.Username, conf.Config.Password)
		if err := p.Login(); err != nil {
			fmt.Println("Login failed")
			logx.Error(err)
			return
		}
		matches, err := handleRubbish(cmd.Context(), &p, path, rules, rubbishConcurrency, rubbishDeleteMode)
		if err != nil {
			// Context cancellation (e.g. Ctrl-C) is reported, not logged
			// as an error.
			if errors.Is(err, context.Canceled) {
				fmt.Println("Rubbish scan canceled")
				return
			}
			fmt.Println("Handle rubbish failed")
			logx.Error(err)
			return
		}
		if len(matches) == 0 {
			fmt.Printf("No rubbish files matched under %s\n", path)
			return
		}
		for _, match := range matches {
			if rubbishDeleteMode {
				fmt.Printf("Deleted rubbish: %s (matched %s)\n", match.path, match.pattern)
				continue
			}
			fmt.Printf("Rubbish file: %s (matched %s)\n", match.path, match.pattern)
		}
	},
}

// init registers the rubbish flags.
func init() {
	RubbishCmd.Flags().StringVarP(&rubbishPath, "path", "p", "/", "The path where to scan rubbish files recursively")
	RubbishCmd.Flags().StringVar(&rulesPath, "rules", "", "Path or URL to the rubbish rules file")
	RubbishCmd.Flags().IntVarP(&rubbishConcurrency, "concurrency", "c", 8, "number of folders to process concurrently")
	RubbishCmd.Flags().BoolVarP(&rubbishDeleteMode, "delete", "d", false, "delete matched rubbish files instead of only listing them")
	RubbishCmd.Flags().BoolVar(&openRulesFile, "open-rules", false, "Open the rubbish rules file, downloading the default file to the config directory when needed")
	RubbishCmd.Flags().BoolVar(&openRulesDir, "open-rules-dir", false, "Open the rubbish rules directory, downloading the default file to the config directory when needed")
	RubbishCmd.Flags().BoolVar(&downloadRules, "download-rules", false, "Download the default rubbish rules file from GitHub into the config directory before running")
}
// loadRules parses a rubbish rules file. Blank lines and "#" comments are
// skipped; lines starting with "!" become exclude patterns; every other
// non-empty line becomes an include pattern. At least one include rule is
// required.
func loadRules(path string) (compiledRules, error) {
	expandedPath := utils.ExpandLocalPath(path)
	file, err := os.Open(expandedPath)
	if err != nil {
		return compiledRules{}, err
	}
	defer file.Close()
	var rules compiledRules
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		switch {
		case line == "", strings.HasPrefix(line, "#"):
			// blank line or comment
		case strings.HasPrefix(line, "!"):
			if pattern := strings.TrimSpace(line[1:]); pattern != "" {
				rules.excludes = append(rules.excludes, pattern)
			}
		default:
			rules.includes = append(rules.includes, line)
		}
	}
	if err := scanner.Err(); err != nil {
		return compiledRules{}, err
	}
	if len(rules.includes) == 0 {
		return compiledRules{}, fmt.Errorf("no include rules found in %s", expandedPath)
	}
	return rules, nil
}
// resolveRulesPath turns the --rules flag value into a concrete local file
// path. Empty means the default config-dir location; an http(s) URL is
// downloaded into the default location first; a local directory gets the
// default rules file name appended.
func resolveRulesPath(raw string) (string, error) {
	trimmed := strings.TrimSpace(raw)
	if trimmed == "" {
		return defaultRulesPath()
	}
	if isRemoteRulesSource(trimmed) {
		target, err := defaultRulesPath()
		if err != nil {
			return "", err
		}
		if err := downloadDefaultRules(target, trimmed); err != nil {
			return "", err
		}
		return target, nil
	}
	expanded := utils.ExpandLocalPath(trimmed)
	info, err := os.Stat(expanded)
	if err == nil && info.IsDir() {
		// A directory was given: use the default file name inside it.
		return filepath.Join(expanded, filepath.Base(defaultRulesRelativePath)), nil
	}
	if err != nil && !os.IsNotExist(err) {
		return "", err
	}
	// A not-yet-existing file path is acceptable; callers may create it.
	return expanded, nil
}
// defaultRulesPath returns the default rules file location inside the user's
// config directory (e.g. <config-dir>/pikpakcli/rules/rubbish_rules.txt).
func defaultRulesPath() (string, error) {
	configDir, err := os.UserConfigDir()
	if err != nil {
		return "", fmt.Errorf("get config dir error: %w", err)
	}
	target := filepath.Join(configDir, "pikpakcli", defaultRulesRelativePath)
	return target, nil
}
// ensureDefaultRulesFile downloads the default rules file to path when no
// file exists there yet; an existing file is left untouched.
func ensureDefaultRulesFile(path string) error {
	if path == "" {
		return errors.New("rules path cannot be empty")
	}
	_, err := os.Stat(path)
	switch {
	case err == nil:
		// Already present; nothing to do.
		return nil
	case os.IsNotExist(err):
		return downloadDefaultRules(path, defaultRulesDownloadURL)
	default:
		return err
	}
}
// downloadDefaultRules fetches a rules file from sourceURL and writes it to
// targetPath, creating the parent directory when needed. Any non-200
// response is treated as an error.
func downloadDefaultRules(targetPath string, sourceURL string) error {
	if err := utils.CreateDirIfNotExist(filepath.Dir(targetPath)); err != nil {
		return err
	}
	// Use a bounded client: plain http.Get has no timeout and could hang
	// the CLI forever on a stalled connection.
	client := &http.Client{Timeout: 30 * time.Second}
	resp, err := client.Get(sourceURL)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("download rules returned %s", resp.Status)
	}
	bs, err := io.ReadAll(resp.Body)
	if err != nil {
		return err
	}
	return os.WriteFile(targetPath, bs, 0o644)
}
// isRemoteRulesSource reports whether path is an http(s) URL rather than a
// local filesystem path.
func isRemoteRulesSource(path string) bool {
	for _, scheme := range []string{"http://", "https://"} {
		if strings.HasPrefix(path, scheme) {
			return true
		}
	}
	return false
}
// openLocalPath opens path with the OS default handler (Finder, Explorer,
// xdg-open, ...). The child process is started and not waited on.
func openLocalPath(path string) error {
	name, args, err := buildLocalOpenCommand(runtime.GOOS, path)
	if err != nil {
		return err
	}
	return exec.Command(name, args...).Start()
}
// buildLocalOpenCommand returns the platform launcher used to open a file or
// directory with the user's default application. Unknown platforms yield an
// error.
func buildLocalOpenCommand(goos string, path string) (string, []string, error) {
	launchers := map[string]struct {
		name string
		args []string
	}{
		"darwin":  {"open", []string{path}},
		"linux":   {"xdg-open", []string{path}},
		"windows": {"cmd", []string{"/c", "start", "", path}},
	}
	launcher, ok := launchers[goos]
	if !ok {
		return "", nil, fmt.Errorf("unsupported platform: %s", goos)
	}
	return launcher.name, launcher.args, nil
}
// handleRubbish resolves rootPath to a folder id and walks the subtree,
// returning every rule match (already deleted server-side when deleteMode is
// set). A nil context is tolerated; non-positive concurrency is clamped to 1.
func handleRubbish(ctx context.Context, p rubbishProvider, rootPath string, rules compiledRules, concurrency int, deleteMode bool) ([]rubbishMatch, error) {
	if ctx == nil {
		ctx = context.Background()
	}
	if err := ctx.Err(); err != nil {
		return nil, err
	}
	rootID, err := p.GetPathFolderId(rootPath)
	if err != nil {
		return nil, err
	}
	if concurrency < 1 {
		// Keep the semaphore usable even for zero/negative input.
		concurrency = 1
	}
	state := rubbishWalkState{sem: make(chan struct{}, concurrency)}
	matches := make([]rubbishMatch, 0)
	if walkErr := walkRubbish(ctx, p, rootID, filepath.Clean(rootPath), rules, deleteMode, &matches, &state); walkErr != nil {
		return nil, walkErr
	}
	return matches, nil
}
// walkRubbish recursively scans folderID (located at currentPath) for
// entries matching rules. File matches are appended to *matches under
// state.mu; in deleteMode every match is deleted server-side first. Child
// folders are walked on a new goroutine when a slot is free in state.sem,
// otherwise synchronously on the current goroutine (the non-blocking
// `default` branch) — that fallback bounds concurrency without ever
// deadlocking on the semaphore during recursion. Each child folder, whether
// walked async or sync, reports exactly one rubbishFolderResult, collected
// at the end.
func walkRubbish(ctx context.Context, p rubbishProvider, folderID, currentPath string, rules compiledRules, deleteMode bool, matches *[]rubbishMatch, state *rubbishWalkState) error {
	if err := ctx.Err(); err != nil {
		return err
	}
	files, err := p.GetFolderFileStatList(folderID)
	if err != nil {
		return err
	}
	// Buffered with capacity for every possible child so finished workers
	// never block, even if this function returns early with an error.
	results := make(chan rubbishFolderResult, len(files))
	var childFolders int
	for _, file := range files {
		if err := ctx.Err(); err != nil {
			return err
		}
		childPath := filepath.Join(currentPath, file.Name)
		if file.Kind == api.FileKindFolder {
			childFolders++
			select {
			case <-ctx.Done():
				return ctx.Err()
			case state.sem <- struct{}{}:
				// Slot acquired: walk this subtree on its own goroutine.
				go func(file api.FileStat, childPath string) {
					defer func() {
						<-state.sem
					}()
					localMatches := make([]rubbishMatch, 0)
					err := walkRubbish(ctx, p, file.ID, childPath, rules, deleteMode, &localMatches, state)
					if err == nil {
						// The folder itself may also match a rule; it is
						// checked (and in deleteMode deleted) only after
						// its subtree finished cleanly.
						if pattern, ok := rules.Match(childPath); ok {
							if deleteMode {
								err = p.DeleteFile(file.ID)
							}
							if err == nil {
								localMatches = append(localMatches, rubbishMatch{path: childPath, pattern: pattern})
							}
						}
					}
					results <- rubbishFolderResult{matches: localMatches, err: err}
				}(file, childPath)
			default:
				// Semaphore full: recurse synchronously instead of waiting,
				// then report through the same results channel.
				localMatches := make([]rubbishMatch, 0)
				if err := walkRubbish(ctx, p, file.ID, childPath, rules, deleteMode, &localMatches, state); err != nil {
					return err
				}
				if pattern, ok := rules.Match(childPath); ok {
					if deleteMode {
						if err := p.DeleteFile(file.ID); err != nil {
							return err
						}
					}
					localMatches = append(localMatches, rubbishMatch{path: childPath, pattern: pattern})
				}
				results <- rubbishFolderResult{matches: localMatches}
			}
			continue
		}
		// Plain file: test it against the rules directly.
		pattern, ok := rules.Match(childPath)
		if !ok {
			continue
		}
		if deleteMode {
			if err := p.DeleteFile(file.ID); err != nil {
				return err
			}
		}
		state.mu.Lock()
		*matches = append(*matches, rubbishMatch{path: childPath, pattern: pattern})
		state.mu.Unlock()
	}
	// Collect one result per child folder, aborting early on cancellation
	// or the first child error.
	for i := 0; i < childFolders; i++ {
		select {
		case <-ctx.Done():
			return ctx.Err()
		case result := <-results:
			if result.err != nil {
				return result.err
			}
			state.mu.Lock()
			*matches = append(*matches, result.matches...)
			state.mu.Unlock()
		}
	}
	return nil
}
// Match reports whether path matches any include pattern without matching an
// exclude pattern, returning the winning include pattern. Excludes always
// take precedence over includes.
func (r compiledRules) Match(path string) (string, bool) {
	normalizedPath := filepath.Clean(path)
	if normalizedPath == "." {
		normalizedPath = string(filepath.Separator)
	}
	name := filepath.Base(normalizedPath)
	for _, exclude := range r.excludes {
		if patternMatches(exclude, normalizedPath, name) {
			return "", false
		}
	}
	for _, include := range r.includes {
		if patternMatches(include, normalizedPath, name) {
			return include, true
		}
	}
	return "", false
}
// patternMatches checks one rule pattern against a file. Patterns without a
// path separator match the base name; patterns containing one match the full
// path (relative to the root unless the pattern itself is absolute).
// Patterns with no wildcard are compared literally; otherwise filepath.Match
// semantics apply.
func patternMatches(pattern, fullPath, name string) bool {
	pattern = filepath.Clean(strings.TrimSpace(pattern))
	if pattern == "." {
		return false
	}
	target := name
	if strings.Contains(pattern, string(filepath.Separator)) {
		if strings.HasPrefix(pattern, string(filepath.Separator)) {
			target = fullPath
		} else {
			target = strings.TrimPrefix(fullPath, string(filepath.Separator))
		}
	}
	if strings.ContainsAny(pattern, "*?[") {
		matched, err := filepath.Match(pattern, target)
		return err == nil && matched
	}
	return target == pattern
}
// hasWildcard reports whether pattern contains any filepath.Match
// metacharacter (*, ?, or the start of a [character class]).
func hasWildcard(pattern string) bool {
	return strings.IndexAny(pattern, "*?[") >= 0
}
================================================
FILE: cli/rubbish/rubbish_test.go
================================================
package rubbish
import (
"context"
"errors"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"sync"
"testing"
"github.com/52funny/pikpakcli/internal/api"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// fakeRubbishProvider is an in-memory stand-in for the PikPak API used by the
// rubbish command tests: it resolves paths to folder ids, serves folder
// listings, and records every delete request.
type fakeRubbishProvider struct {
	// pathToID maps a cleaned remote directory path to its folder id.
	pathToID map[string]string
	// folders maps a folder id to the files it directly contains.
	folders map[string][]api.FileStat
	// deletedFiles records ids passed to DeleteFile, in call order.
	deletedFiles []string
	// mu guards folders and deletedFiles against concurrent walker goroutines.
	mu sync.Mutex
}
// GetPathFolderId resolves a remote directory path to its fake folder id.
func (f *fakeRubbishProvider) GetPathFolderId(dirPath string) (string, error) {
	id, ok := f.pathToID[filepath.Clean(dirPath)]
	if !ok {
		return "", errors.New("path not found")
	}
	return id, nil
}
// GetFolderFileStatList returns a defensive copy of the folder's listing so
// callers cannot mutate the fake's internal state.
func (f *fakeRubbishProvider) GetFolderFileStatList(parentId string) ([]api.FileStat, error) {
	f.mu.Lock()
	defer f.mu.Unlock()
	src := f.folders[parentId]
	out := make([]api.FileStat, len(src))
	copy(out, src)
	return out, nil
}
// DeleteFile removes the id from every folder listing, drops any listing
// keyed by that id (deleting a folder removes its contents), and records the
// id so tests can assert on it.
func (f *fakeRubbishProvider) DeleteFile(fileId string) error {
	f.mu.Lock()
	defer f.mu.Unlock()
	f.deletedFiles = append(f.deletedFiles, fileId)
	for parent, entries := range f.folders {
		// In-place filter reusing the backing array.
		kept := entries[:0]
		for _, entry := range entries {
			if entry.ID == fileId {
				continue
			}
			kept = append(kept, entry)
		}
		f.folders[parent] = kept
	}
	delete(f.folders, fileId)
	return nil
}
// TestLoadRules verifies that loadRules skips comment and blank lines, keeps
// plain patterns as includes, and routes "!"-prefixed patterns into excludes.
func TestLoadRules(t *testing.T) {
	dir := t.TempDir()
	rulesFile := filepath.Join(dir, "rules.txt")
	err := os.WriteFile(rulesFile, []byte("# comment\n\n.DS_Store\n*.tmp\n!important.tmp\n"), 0o644)
	require.NoError(t, err)
	rules, err := loadRules(rulesFile)
	require.NoError(t, err)
	assert.Equal(t, []string{".DS_Store", "*.tmp"}, rules.includes)
	assert.Equal(t, []string{"important.tmp"}, rules.excludes)
}
// TestCompiledRulesMatch exercises name-only, relative-path, and
// root-anchored patterns, and checks that exclude patterns override
// otherwise-matching includes.
func TestCompiledRulesMatch(t *testing.T) {
	rules := compiledRules{
		includes: []string{".DS_Store", "*.tmp", "cache/*.part", "/System/*"},
		excludes: []string{"keep.tmp", "!ignored", "/System/keep/*"},
	}
	// Literal base-name include.
	pattern, ok := rules.Match("/Movies/.DS_Store")
	require.True(t, ok)
	assert.Equal(t, ".DS_Store", pattern)
	// Wildcard base-name include.
	pattern, ok = rules.Match("/Movies/video.tmp")
	require.True(t, ok)
	assert.Equal(t, "*.tmp", pattern)
	// Relative path pattern (matched without the leading separator).
	pattern, ok = rules.Match("/cache/file.part")
	require.True(t, ok)
	assert.Equal(t, "cache/*.part", pattern)
	// Exclude wins over the "*.tmp" include.
	_, ok = rules.Match("/Movies/keep.tmp")
	assert.False(t, ok)
	// Root-anchored include.
	pattern, ok = rules.Match("/System/logs")
	require.True(t, ok)
	assert.Equal(t, "/System/*", pattern)
	// Root-anchored exclude wins over the root-anchored include.
	_, ok = rules.Match("/System/keep/file")
	assert.False(t, ok)
}
// TestHandleRubbishListsAndDeletesMatches walks a small fake tree twice:
// first in preview mode (matches reported, nothing deleted), then in delete
// mode (same matches, and the matched ids must have been deleted).
func TestHandleRubbishListsAndDeletesMatches(t *testing.T) {
	provider := &fakeRubbishProvider{
		pathToID: map[string]string{
			filepath.Clean("/"): "root",
		},
		folders: map[string][]api.FileStat{
			"root": {
				{ID: "movies", Name: "Movies", Kind: api.FileKindFolder},
				{ID: "ds", Name: ".DS_Store", Kind: api.FileKindFile},
				{ID: "keep", Name: "keep.tmp", Kind: api.FileKindFile},
			},
			"movies": {
				{ID: "partial", Name: "video.part", Kind: api.FileKindFile},
				{ID: "poster", Name: "poster.jpg", Kind: api.FileKindFile},
			},
		},
	}
	rules := compiledRules{
		includes: []string{".DS_Store", "*.part", "*.tmp"},
		excludes: []string{"keep.tmp"},
	}
	// Preview pass: deleteMode=false.
	matches, err := handleRubbish(context.Background(), provider, "/", rules, 4, false)
	require.NoError(t, err)
	assert.ElementsMatch(t, []rubbishMatch{
		{path: filepath.Clean("/.DS_Store"), pattern: ".DS_Store"},
		{path: filepath.Clean("/Movies/video.part"), pattern: "*.part"},
	}, matches)
	assert.Empty(t, provider.deletedFiles)
	// Delete pass: deleteMode=true.
	matches, err = handleRubbish(context.Background(), provider, "/", rules, 4, true)
	require.NoError(t, err)
	assert.ElementsMatch(t, []rubbishMatch{
		{path: filepath.Clean("/.DS_Store"), pattern: ".DS_Store"},
		{path: filepath.Clean("/Movies/video.part"), pattern: "*.part"},
	}, matches)
	assert.ElementsMatch(t, []string{"ds", "partial"}, provider.deletedFiles)
}
// TestHandleRubbishNormalizesConcurrency checks that a non-positive
// concurrency value (0 here) is normalized rather than breaking the walk.
func TestHandleRubbishNormalizesConcurrency(t *testing.T) {
	provider := &fakeRubbishProvider{
		pathToID: map[string]string{
			filepath.Clean("/"): "root",
		},
		folders: map[string][]api.FileStat{
			"root": {
				{ID: "tmp", Name: "a.tmp", Kind: api.FileKindFile},
			},
		},
	}
	matches, err := handleRubbish(context.Background(), provider, "/", compiledRules{includes: []string{"*.tmp"}}, 0, false)
	require.NoError(t, err)
	assert.Equal(t, []rubbishMatch{{path: filepath.Clean("/a.tmp"), pattern: "*.tmp"}}, matches)
}
// TestDefaultRulesPathUsesConfigDir checks that the default rules file lives
// under <UserConfigDir>/pikpakcli/rules/rubbish_rules.txt.
func TestDefaultRulesPathUsesConfigDir(t *testing.T) {
	configDir, err := os.UserConfigDir()
	require.NoError(t, err)
	path, err := defaultRulesPath()
	require.NoError(t, err)
	assert.Equal(t, filepath.Join(configDir, "pikpakcli", "rules", "rubbish_rules.txt"), path)
}
// TestResolveRulesPathForDirectory checks that pointing --rules at a
// directory resolves to the rubbish_rules.txt file inside it.
func TestResolveRulesPathForDirectory(t *testing.T) {
	dir := t.TempDir()
	path, err := resolveRulesPath(dir)
	require.NoError(t, err)
	assert.Equal(t, filepath.Join(dir, "rubbish_rules.txt"), path)
}
// TestDownloadDefaultRules serves a rules payload from a local test server
// and checks that downloadDefaultRules writes it verbatim to the target path.
func TestDownloadDefaultRules(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		_, _ = w.Write([]byte(".DS_Store\n*.tmp\n"))
	}))
	defer server.Close()
	targetDir := t.TempDir()
	targetPath := filepath.Join(targetDir, "rules.txt")
	err := downloadDefaultRules(targetPath, server.URL)
	require.NoError(t, err)
	bs, err := os.ReadFile(targetPath)
	require.NoError(t, err)
	assert.Equal(t, ".DS_Store\n*.tmp\n", string(bs))
}
// TestBuildLocalOpenCommand checks the linux branch of buildLocalOpenCommand:
// it should shell out to xdg-open with the file path as the only argument.
func TestBuildLocalOpenCommand(t *testing.T) {
	name, args, err := buildLocalOpenCommand("linux", "/tmp/rules.txt")
	require.NoError(t, err)
	assert.Equal(t, "xdg-open", name)
	assert.Equal(t, []string{"/tmp/rules.txt"}, args)
}
================================================
FILE: cli/share/share.go
================================================
package share
import (
"fmt"
"os"
"strings"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/spf13/cobra"
)
// ShareCommand implements `pikpakcli share`: it prints "PikPak://name|size|hash"
// share links, either for every file inside the folder selected by -p/-P or
// for the specific remote paths passed as arguments (globs are expanded).
var ShareCommand = &cobra.Command{
	Use: "share",
	// NOTE(review): alias "d" is surprising for a share command (it reads
	// like a download alias) — confirm it does not clash with another
	// command's alias.
	Aliases: []string{"d"},
	Short:   `Share file links on the pikpak server`,
	Run: func(cmd *cobra.Command, args []string) {
		p := api.NewPikPakWithContext(cmd.Context(), conf.Config.Username, conf.Config.Password)
		err := p.Login()
		if err != nil {
			fmt.Println("Login failed")
			logx.Error(err)
			return
		}
		// Expand glob patterns in the requested paths before resolving them.
		if len(args) > 0 {
			args, err = api.ExpandRemotePatterns(&p, folder, args, false)
			if err != nil {
				fmt.Println("Expand share target failed")
				logx.Error(err)
				return
			}
		}
		// Output file handle: stdout unless -o names a file to create.
		var f = os.Stdout
		if strings.TrimSpace(output) != "" {
			file, err := os.Create(output)
			if err != nil {
				fmt.Println("Create file failed")
				logx.Error(err)
				return
			}
			defer file.Close()
			f = file
		}
		// Explicit arguments share those entries; otherwise share every file
		// directly inside the folder.
		if len(args) > 0 {
			shareFiles(&p, args, f)
		} else {
			shareFolder(&p, f)
		}
	},
}
// Specifies the folder of the pikpak server (-p).
// Default is the root folder.
var folder string

// Specifies the file to write the share links to (-o).
// Default is stdout.
var output string

// parentId optionally pins the server-side folder id directly (-P),
// skipping the path lookup in shareFolder/shareFiles.
var parentId string
// init registers the share command's flags.
func init() {
	ShareCommand.Flags().StringVarP(&folder, "path", "p", "/", "specific the folder of the pikpak server")
	ShareCommand.Flags().StringVarP(&output, "output", "o", "", "specific the file to write")
	ShareCommand.Flags().StringVarP(&parentId, "parent-id", "P", "", "parent folder id")
}
// shareFolder prints a PikPak:// share link for every regular file directly
// inside the configured folder, resolving the folder id from the path unless
// parentId was already provided.
func shareFolder(p *api.PikPak, f *os.File) {
	if parentId == "" {
		id, err := p.GetDeepFolderId("", folder)
		if err != nil {
			fmt.Println("Get parent id failed")
			logx.Error(err)
			return
		}
		parentId = id
	}
	stats, err := p.GetFolderFileStatList(parentId)
	if err != nil {
		fmt.Println("Get folder file stat list failed")
		logx.Error(err)
		return
	}
	for _, s := range stats {
		// Folders are skipped; only plain files get share links.
		if s.Kind != api.FileKindFile {
			continue
		}
		fmt.Fprintf(f, "PikPak://%s|%s|%s\n", s.Name, s.Size, s.Hash)
	}
}
// shareFiles prints a PikPak:// share link for each requested entry. Errors
// on individual entries are reported and skipped so the rest still print.
func shareFiles(p *api.PikPak, args []string, f *os.File) {
	if parentId == "" {
		id, err := p.GetPathFolderId(folder)
		if err != nil {
			fmt.Println("Get parent id failed")
			logx.Error(err)
			return
		}
		parentId = id
	}
	for _, target := range args {
		stat, err := resolveShareTarget(p, parentId, target)
		if err != nil {
			fmt.Println(target, "get file stat error")
			logx.Error(err)
			continue
		}
		fmt.Fprintf(f, "PikPak://%s|%s|%s\n", stat.Name, stat.Size, stat.Hash)
	}
}
// resolveShareTarget resolves a share argument to its file stat. Targets that
// contain a path separator are resolved as full remote paths; bare file names
// are looked up inside the already-resolved parent folder.
func resolveShareTarget(p *api.PikPak, resolvedParentID string, target string) (api.FileStat, error) {
	// Contains("/") already covers the absolute-path case, so the former
	// additional HasPrefix(target, "/") check was redundant and is dropped.
	if strings.Contains(target, "/") {
		return p.GetFileByPath(target)
	}
	return p.GetFileStat(resolvedParentID, target)
}
================================================
FILE: cli/shell.go
================================================
package cli
import (
ishell "github.com/52funny/pikpakcli/internal/shell"
"github.com/spf13/cobra"
)
// shellCmd launches the interactive PikPak shell, passing the root command so
// every registered subcommand is reachable from inside the shell.
var shellCmd = &cobra.Command{
	Use:   "shell",
	Short: "Start an interactive PikPak shell",
	Run: func(cmd *cobra.Command, args []string) {
		ishell.Start(rootCmd)
	},
}
================================================
FILE: cli/upload/upload.go
================================================
package upload
import (
"fmt"
"os"
"path/filepath"
"regexp"
"strings"
"time"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/52funny/pikpakcli/internal/utils"
"github.com/spf13/cobra"
)
// UploadCmd implements `pikpakcli upload`: it uploads each local file or
// directory argument into the PikPak folder selected by -p/-P.
var UploadCmd = &cobra.Command{
	Use:     "upload",
	Aliases: []string{"u"},
	Short:   `Upload file to pikpak server`,
	Run: func(cmd *cobra.Command, args []string) {
		// No local paths given: show usage instead of uploading nothing.
		if len(args) == 0 {
			cmd.Help()
			return
		}
		api.Concurrent = uploadConcurrency
		p := api.NewPikPakWithContext(cmd.Context(), conf.Config.Username, conf.Config.Password)
		err := p.Login()
		if err != nil {
			fmt.Println("Login failed")
			logx.Error(err)
			return
		}
		err = p.AuthCaptchaToken("POST:/drive/v1/files")
		if err != nil {
			fmt.Println("Auth captcha token failed")
			logx.Error(err)
			return
		}
		// Refresh the session token periodically while uploads run:
		// 7200s * 3/4 = every 90 minutes. Presumably 7200s is the token
		// lifetime — TODO confirm.
		// NOTE(review): this goroutine has no stop signal; it runs until the
		// process exits.
		go func() {
			ticker := time.NewTicker(time.Second * 7200 * 3 / 4)
			defer ticker.Stop()
			for range ticker.C {
				err := p.RefreshToken()
				if err != nil {
					logx.Warn("session", "refresh token failed:", err)
					continue
				}
			}
		}()
		// Upload each argument, dispatching on file vs directory.
		for _, v := range args {
			v = utils.ExpandLocalPath(v)
			stat, err := os.Stat(v)
			if err != nil {
				fmt.Printf("Get file %s stat failed\n", v)
				logx.Error(err)
				continue
			}
			if stat.IsDir() {
				handleUploadFolder(&p, v)
			} else {
				handleUploadFile(&p, v)
			}
		}
	},
}
// Specifies the destination folder on the pikpak server (-p).
var uploadFolder string

// Number of concurrent upload workers (-c); copied into api.Concurrent.
var uploadConcurrency int64

// Sync mode (-s): record finished uploads in .pikpaksync.txt and skip them
// on the next run.
var sync bool

// Parent path id (-P): pins the remote parent folder id directly.
var parentId string
// init registers the upload command's flags.
func init() {
	UploadCmd.Flags().StringVarP(&uploadFolder, "path", "p", "/", "specific the folder of the pikpak server")
	UploadCmd.Flags().Int64VarP(&uploadConcurrency, "concurrency", "c", 1<<4, "specific the concurrency of the upload")
	UploadCmd.Flags().StringSliceVarP(&exclude, "exn", "e", []string{}, "specific the exclude file or folder")
	UploadCmd.Flags().BoolVarP(&sync, "sync", "s", false, "sync mode")
	UploadCmd.Flags().StringVarP(&parentId, "parent-id", "P", "", "parent folder id")
}
// Exclude string list supplied via -e; each entry is treated as a regular
// expression.
var exclude []string

// defaultExcludeRegexp holds the patterns always applied when collecting
// files to upload; user patterns from -e are appended by disposeExclude.
var defaultExcludeRegexp []*regexp.Regexp = []*regexp.Regexp{
	// exclude hidden files (names starting with a dot)
	regexp.MustCompile(`^\..+`),
}
// disposeExclude appends the user-supplied -e patterns to the default exclude
// list. Invalid regular expressions are reported and skipped instead of
// panicking (regexp.MustCompile would abort the whole process on bad input).
func disposeExclude() {
	for _, v := range exclude {
		re, err := regexp.Compile(v)
		if err != nil {
			fmt.Printf("Invalid exclude pattern %q: %v\n", v, err)
			continue
		}
		defaultExcludeRegexp = append(defaultExcludeRegexp, re)
	}
}
// handleUploadFile uploads a single local file into the configured remote
// folder, resolving (and caching in the package-level parentId) the folder id
// on first use.
func handleUploadFile(p *api.PikPak, path string) {
	if parentId == "" {
		id, err := p.GetDeepFolderOrCreateId("", uploadFolder)
		if err != nil {
			fmt.Printf("Get folder %s id failed\n", uploadFolder)
			logx.Error(err)
			return
		}
		parentId = id
	}
	if err := p.UploadFile(parentId, path); err != nil {
		fmt.Printf("Upload file %s failed\n", path)
		logx.Error(err)
		return
	}
	fmt.Printf("Upload file %s success!\n", path)
}
// handleUploadFolder recursively uploads a local directory into the
// configured remote folder. Files already recorded in the sync file are
// skipped, and each successfully uploaded file is appended to it.
func handleUploadFolder(p *api.PikPak, path string) {
	basePath := filepath.Base(filepath.ToSlash(path))
	uploadFilePath, err := utils.GetUploadFilePath(path, defaultExcludeRegexp)
	if err != nil {
		fmt.Println("Get upload file path failed")
		logx.Error(err)
		return
	}
	syncTxt, err := utils.NewSyncTxt(".pikpaksync.txt", sync)
	if err != nil {
		fmt.Println("Init sync file failed")
		logx.Error(err)
		return
	}
	defer syncTxt.Close()
	// Drop files already uploaded in a previous run.
	uploadFilePath = syncTxt.UnSync(uploadFilePath)
	fmt.Println("upload file list:")
	for _, f := range uploadFilePath {
		fmt.Println(filepath.Join(basePath, f))
	}
	if parentId == "" {
		parentId, err = p.GetDeepFolderOrCreateId("", uploadFolder)
		if err != nil {
			fmt.Printf("Get folder %s id error\n", uploadFolder)
			logx.Error(err)
			return
		}
	}
	logx.Debug("upload", "upload folder: ", uploadFolder, " parentId: ", parentId)
	parentId, err = p.GetDeepFolderOrCreateId(parentId, basePath)
	if err != nil {
		fmt.Printf("Get base_upload_path %s id error\n", basePath)
		logx.Error(err)
		return
	}
	// Cache of remote folder ids keyed by local relative directory, to avoid
	// re-resolving the same directory for every file in it.
	parentIdMap := make(map[string]string)
	for _, v := range uploadFilePath {
		targetId := parentId
		if strings.Contains(v, "/") || strings.Contains(v, "\\") {
			base := filepath.Dir(v)
			id, ok := parentIdMap[base]
			if !ok {
				id, err = p.GetDeepFolderOrCreateId(parentId, base)
				if err != nil {
					// Fix: previously the upload proceeded with an empty id
					// and the empty id was cached for the whole directory;
					// now the file is skipped and nothing bad is cached.
					fmt.Println("Get folder id failed")
					logx.Error(err)
					continue
				}
				parentIdMap[base] = id
			}
			targetId = id
		}
		if err = p.UploadFile(targetId, filepath.Join(path, v)); err != nil {
			// Fix: failed uploads are no longer written to the sync file or
			// reported as successful.
			fmt.Printf("%s upload failed\n", v)
			logx.Error(err)
			continue
		}
		syncTxt.WriteString(v + "\n")
		fmt.Printf("%s upload success!\n", v)
	}
}
================================================
FILE: conf/config.go
================================================
package conf
import (
"bytes"
"encoding/binary"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"gopkg.in/yaml.v2"
)
// ConfigType holds the settings loaded from config.yml (or from a config
// blob appended to the executable by readFromBinary).
type ConfigType struct {
	// Proxy is an optional proxy URL; when non-empty it must contain "://".
	Proxy string `yaml:"proxy"`
	// Username is the PikPak account name or phone number.
	Username string `yaml:"username"`
	// Password is the PikPak account password.
	Password string `yaml:"password"`
	// Open configures the interactive shell's `open` builtin.
	Open OpenConfig `yaml:"open"`
}
// OpenConfig describes how the shell's `open` builtin launches local
// applications for each file category. Each command list holds the
// executable name followed by its arguments.
type OpenConfig struct {
	// DownloadDir is the local cache directory for files that must be
	// downloaded before opening.
	DownloadDir string `yaml:"download_dir"`
	// Default is the fallback command when no category-specific one is set.
	Default []string `yaml:"default"`
	Text    []string `yaml:"text"`
	Image   []string `yaml:"image"`
	Video   []string `yaml:"video"`
	Audio   []string `yaml:"audio"`
	PDF     []string `yaml:"pdf"`
}

// Config is the process-wide configuration populated by InitConfig.
var Config ConfigType
// UseProxy reports whether a proxy URL has been configured.
func (c *ConfigType) UseProxy() bool {
	return c.Proxy != ""
}
// InitConfig initializes the global configuration. Lookup order: a config
// blob embedded in the executable, then the given path, then the per-user
// config directory when the given path does not exist.
func InitConfig(path string) error {
	// Firstly, try the config appended to the executable itself.
	if readFromBinary() == nil {
		return nil
	}
	// Secondly, read config.yml from the given path, falling back to the
	// default config directory when the path does not exist.
	if _, err := os.Stat(path); os.IsNotExist(err) {
		if err := readFromConfigDir(); err != nil {
			return err
		}
	} else {
		if err := readFromPath(path); err != nil {
			return err
		}
	}
	// A non-empty proxy must contain '://'.
	if len(Config.Proxy) != 0 && !strings.Contains(Config.Proxy, "://") {
		return fmt.Errorf("proxy should contains ://")
	}
	return nil
}
// readFromBinary reads a config blob appended to the end of the executable.
// Trailing layout:
// config_bytes: n bytes
// size: 4 bytes (little-endian length of config_bytes)
// end_magic: 10 bytes ("config.yml")
// -----------------------------------
// | config_bytes | size | end_magic |
// -----------------------------------
func readFromBinary() error {
	f, err := os.Open(os.Args[0])
	if err != nil {
		return err
	}
	defer f.Close()
	stat, err := f.Stat()
	if err != nil {
		return err
	}
	var end_magic = make([]byte, 10)
	n, err := f.ReadAt(end_magic, stat.Size()-10)
	if err != nil {
		return err
	}
	if n != 10 {
		return fmt.Errorf("read end_magic err: %d", n)
	}
	// Not have `config.yml` in the end
	if !bytes.Equal(end_magic, []byte("config.yml")) {
		return fmt.Errorf("not a pikpakcli binary")
	}
	var size = make([]byte, 4)
	n, err = f.ReadAt(size, stat.Size()-14)
	if err != nil {
		return err
	}
	if n != 4 {
		return fmt.Errorf("read size err: %d", n)
	}
	configSize := int64(binary.LittleEndian.Uint32(size))
	configBuf := make([]byte, configSize)
	n, err = f.ReadAt(configBuf, stat.Size()-14-configSize)
	// Fix: check the error and the byte count separately. The previous
	// combined check `if err != nil || n != int(configSize) { return err }`
	// could return a nil error on a short read, silently reporting success,
	// and made the size check below unreachable.
	if err != nil {
		return err
	}
	if n != int(configSize) {
		return fmt.Errorf("read config size err: %d", n)
	}
	// Unmarshal config
	return yaml.Unmarshal(configBuf, &Config)
}
// readFromPath reads the configuration file from the given path.
func readFromPath(path string) error {
	return readConfig(path)
}
// readFromConfigDir reads config.yml from the per-user config directory
// (e.g. ~/.config/pikpakcli/config.yml on Linux).
func readFromConfigDir() error {
	configDir, err := os.UserConfigDir()
	if err != nil {
		return err
	}
	return readConfig(filepath.Join(configDir, "pikpakcli", "config.yml"))
}
// readConfig loads the YAML file at path into the global Config.
func readConfig(path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()
	bs, err := io.ReadAll(f)
	if err != nil {
		return err
	}
	// Unmarshal directly into the package-level Config.
	return yaml.Unmarshal(bs, &Config)
}
================================================
FILE: config_example.yml
================================================
# Proxy URL, for example: http://127.0.0.1:7890
proxy:
# PikPak account username or phone number with country code
username: xxx
# PikPak account password
password: xxx
# Local open command settings used by the interactive shell builtin `open`
open:
# Local cache directory for files that need to be downloaded before opening
download_dir:
# Fallback command used when no file-type-specific command is configured
default: []
# Command used to open text and source files
text: []
# Command used to open image files
image: []
# Command used to open video files
video: []
# Command used to open audio files
audio: []
# Command used to open PDF files
pdf: []
================================================
FILE: docs/command.md
================================================
# Command Usage
> For docker users, please refer to the [Docker Command Usage](command_docker.md).
## Upload
- Uploads all files in the local directory to the Movies folder.
```bash
pikpakcli upload -p Movies .
```
- Upload files in local directory except for `mp3`, `jpg` to Movies folder.
```bash
pikpakcli upload -e .mp3,.jpg -p Movies .
```
- Select the number of concurrent tasks for the upload (default is 16).
```bash
pikpakcli upload -c 20 -p Movies .
```
- Use the `-P` flag to set the `id` of the folder on the Pikpak cloud.
```bash
pikpakcli upload -P AgmoDVmJPYbHn8ito1 .
```
- Running `pikpakcli upload` without any local path arguments shows the command help.
## Download
- Download the target pointed to by `-p`. If it is a directory, download it recursively; if it is a file, download that file.
```bash
pikpakcli download -p Movies
pikpakcli download -p Movies/Peppa_Pig.mp4
```
- Use `-p` as the base remote path, then append the following argument to it. The CLI will decide whether the target is a file or a directory.
```bash
pikpakcli download -p Movies Peppa_Pig.mp4
pikpakcli download -p Movies Cartoons
pikpakcli download -p Movies Kids/Peppa_Pig.mp4
```
- Use an absolute remote path in the argument to override `-p`.
```bash
pikpakcli download -p Movies /TV/Peppa_Pig.mp4
```
- Limit the number of files that can be downloaded at the same time (default: 1).
```bash
pikpakcli download -c 5 -p Movies
```
- Specify the output directory of downloaded files.
```bash
pikpakcli download -p Movies -o Film
```
- Use the `-g` flag to display status information during the download process.
```bash
pikpakcli download -p Movies -o Film -g
```
## Share
- Share links to all files under Movies.
```bash
pikpakcli share -p Movies
```
- Share the link to the specified file.
```bash
pikpakcli share Movies/Peppa_Pig.mp4
```
- Share link output to a specified file.
```bash
pikpakcli share --output sha.txt -p Movies
```
## New
### New Folder
- Create a new folder NewFolder under Movies
```bash
pikpakcli new folder -p Movies NewFolder
```
### New Sha File
- Create a new Sha file under Movies.
```bash
pikpakcli new sha -p /Movies 'PikPak://美国队长.mkv|22809693754|75BFE33237A0C06C725587F87981C567E4E478C3'
```
### New Magnet File
- Create new magnet file.
```bash
pikpakcli new url 'magnet:?xt=urn:btih:e9c98e3ed488611abc169a81d8a21487fd1d0732'
```
## Quota
- Get space on your PikPak cloud drive.
```bash
pikpakcli quota -H
```
## Ls
- Get information about all files in the root directory.
```bash
pikpakcli ls -lH -p /
```
## Delete
- Delete a file by full path from the PikPak cloud.
```bash
pikpakcli delete /Movies/Peppa_Pig.mp4
```
- Delete entries from a specific directory using the `-p` flag.
```bash
pikpakcli delete -p /Movies Peppa_Pig.mp4
```
- Delete multiple entries under the same path.
```bash
pikpakcli delete -p /Movies File1.mp4 File2.mp4
```
## Rubbish
- Scan a directory recursively with the default rubbish rules. If the rule file does not exist in the user config directory, the CLI downloads it from this repository automatically.
```bash
pikpakcli rubbish
pikpakcli rubbish -p /Movies
```
- Preview matched rubbish files without deleting them, then delete them with `-d`.
```bash
pikpakcli rubbish -p /Movies
pikpakcli rubbish -p /Movies -d
```
- Open the local rules file or the local rules directory. If the default rule file is missing, it is downloaded first and then opened.
```bash
pikpakcli rubbish --open-rules
pikpakcli rubbish --open-rules-dir
```
- Download the default rules file explicitly, or use a custom local path or remote URL as the rules source.
```bash
pikpakcli rubbish --download-rules
pikpakcli rubbish --rules ~/.config/pikpakcli/rules/rubbish_rules.txt
pikpakcli rubbish --rules https://raw.githubusercontent.com/52funny/pikpakcli/master/rules/rubbish_rules.txt
```
## Rename
- Rename a file or folder by full path.
```bash
pikpakcli rename /Movies/Peppa_Pig.mp4 Peppa_Pig_S01E01.mp4
```
- Rename a folder.
```bash
pikpakcli rename /Movies/Cartoons Kids
```
## Shell
- Start the interactive shell.
```bash
pikpakcli shell
```
- Change directory and list files in the current path.
```bash
pikpakcli shell
cd "/Movies/Kids Cartoons"
ls
```
- Open a remote file from the shell with a local application.
```bash
pikpakcli shell
cd "/Movies"
open Peppa_Pig.mp4
```
================================================
FILE: docs/command_docker.md
================================================
# Docker Command Usage
For docker users, the most different part is linking the configuration file (i.e., `config.yml`) and folder you want to operate (e.g., `download` or `upload`) into the container.
## Upload
- Uploads all files in the local directory (e.g., `/path/to/upload`) to the `Movies` folder.
```bash
# original cli: pikpakcli upload -p Movies .
# Docker cli
docker run --rm -v /path/to/config.yml:/root/.config/pikpakcli/config.yml -v /path/to/upload/:/upload pikpakcli:latest upload -p Movies /upload
```
- Upload files in local directory except for `mp3`, `jpg` to Movies folder.
```bash
# original cli: pikpakcli upload -e .mp3,.jpg -p Movies .
# Docker cli
docker run --rm -v /path/to/config.yml:/root/.config/pikpakcli/config.yml -v /path/to/upload/:/upload pikpakcli:latest upload -e .mp3,.jpg -p Movies /upload
```
## Download
- Download the target pointed to by `-p`. The target can be a folder or a file.
```bash
# original cli: pikpakcli download -p Movies
# Docker cli
# the option -o is used to specify the folder in container to save downloaded files
docker run --rm -v /path/to/config.yml:/root/.config/pikpakcli/config.yml -v /path/to/download/:/download pikpakcli:latest download -p Movies -o /download
```
- Use `-p` as the base remote path, then append the argument to it. The CLI will decide whether it is a file or a folder.
```bash
# original cli: pikpakcli download -p Movies Peppa_Pig.mp4
# Docker cli
docker run --rm -v /path/to/config.yml:/root/.config/pikpakcli/config.yml -v /path/to/download/:/download pikpakcli:latest download -p Movies Peppa_Pig.mp4 -o /download
```
- Use an absolute remote path in the argument to override `-p`.
```bash
# original cli: pikpakcli download -p Movies /TV/Peppa_Pig.mp4
# Docker cli
docker run --rm -v /path/to/config.yml:/root/.config/pikpakcli/config.yml -v /path/to/download/:/download pikpakcli:latest download -p Movies /TV/Peppa_Pig.mp4 -o /download
```
> Other download commands are omitted here, please refer to the original cli commands in [Command Usage](command.md).
## Wrapper Script
We provide a wrapper script `docker_cli.sh` to simplify the docker command usage. You can run the script directly after setting up the `config.yml` file in the current directory. The script will create two folders `pikpak_downloads` and `pikpak_uploads` in the current directory for download and upload operations respectively.
```bash
# Make the script executable
chmod +x docker_cli.sh
# Run the script for upload
./docker_cli.sh upload -p Movies ./pikpak_uploads
# Run the script for download
./docker_cli.sh download -p Movies -o ./pikpak_downloads
```
================================================
FILE: docs/command_zhCN.md
================================================
# 命令使用方法
## 上传
- 将本地目录下的所有文件上传至 Movies 文件夹内
```bash
pikpakcli upload -p Movies .
```
- 将本地目录下除了后缀名为`mp3`, `jpg`的文件上传至 Movies 文件夹内
```bash
pikpakcli upload -e .mp3,.jpg -p Movies .
```
- 指定上传的协程数目(默认为 16)
```bash
pikpakcli upload -c 20 -p Movies .
```
- 使用 `-P` 标志设置 Pikpak 云上文件夹的 `id`
```bash
pikpakcli upload -P AgmoDVmJPYbHn8ito1 .
```
- 直接运行 `pikpakcli upload` 且不带任何本地路径参数时,会显示该命令的帮助信息。
## 下载
- 下载 `-p` 指向的目标。如果该目标是文件夹则递归下载,如果是文件则下载该文件
```bash
pikpakcli download -p Movies
pikpakcli download -p Movies/Peppa_Pig.mp4
```
- 把 `-p` 作为远端基路径,再拼接后面的参数。CLI 会自动判断目标是文件还是文件夹
```bash
pikpakcli download -p Movies Peppa_Pig.mp4
pikpakcli download -p Movies Cartoons
pikpakcli download -p Movies Kids/Peppa_Pig.mp4
```
- 如果参数本身是绝对路径,则会覆盖 `-p`
```bash
pikpakcli download -p Movies /TV/Peppa_Pig.mp4
```
- 限制同时下载的文件个数 (默认: 1)
```bash
pikpakcli download -c 5 -p Movies
```
- 指定下载内容的输出目录
```bash
pikpakcli download -p Movies -o Film
```
- 使用 `-g` 标志显示下载过程中的状态信息
```bash
pikpakcli download -p Movies -o Film -g
```
## 分享
- 分享 Movies 下的所有文件的链接
```bash
pikpakcli share -p Movies
```
- 分享指定文件的链接
```bash
pikpakcli share Movies/Peppa_Pig.mp4
```
- 分享链接输出到指定文件
```bash
pikpakcli share --output sha.txt -p Movies
```
## 新建
### 新建文件夹
- 在 Movies 下新建文件夹 NewFolder
```bash
pikpakcli new folder -p Movies NewFolder
```
### 新建 Sha 文件
- 在 Movies 下新建 Sha 文件
```bash
pikpakcli new sha -p /Movies 'PikPak://美国队长.mkv|22809693754|75BFE33237A0C06C725587F87981C567E4E478C3'
```
### 新建磁力
- 新建磁力文件
```bash
pikpakcli new url 'magnet:?xt=urn:btih:e9c98e3ed488611abc169a81d8a21487fd1d0732'
```
## 配额
- 获取 PikPak 云盘的空间
```bash
pikpakcli quota -H
```
## 获取目录信息
- 获取根目录下面的所有文件信息
```bash
pikpakcli ls -lH -p /
```
## 删除
- 按完整路径删除文件
```bash
pikpakcli delete /Movies/Peppa_Pig.mp4
```
- 使用 `-p` 指定父目录后删除其中的文件或文件夹
```bash
pikpakcli delete -p /Movies Peppa_Pig.mp4
```
- 在同一路径下同时删除多个文件或文件夹
```bash
pikpakcli delete -p /Movies File1.mp4 File2.mp4
```
## 垃圾文件清理
- 使用默认垃圾文件规则递归扫描目录。如果用户配置目录中还没有规则文件,CLI 会自动从当前仓库下载默认规则。
```bash
pikpakcli rubbish
pikpakcli rubbish -p /Movies
```
- 默认只预览匹配结果,不会删除;加上 `-d` 后才会执行删除。
```bash
pikpakcli rubbish -p /Movies
pikpakcli rubbish -p /Movies -d
```
- 打开本地规则文件或规则目录。如果默认规则文件不存在,会先下载再打开。
```bash
pikpakcli rubbish --open-rules
pikpakcli rubbish --open-rules-dir
```
- 手动下载默认规则文件,或者指定自定义本地路径 / 远程 URL 作为规则来源。
```bash
pikpakcli rubbish --download-rules
pikpakcli rubbish --rules ~/.config/pikpakcli/rules/rubbish_rules.txt
pikpakcli rubbish --rules https://raw.githubusercontent.com/52funny/pikpakcli/master/rules/rubbish_rules.txt
```
## 重命名
- 按完整路径重命名文件或文件夹
```bash
pikpakcli rename /Movies/Peppa_Pig.mp4 Peppa_Pig_S01E01.mp4
```
- 重命名文件夹
```bash
pikpakcli rename /Movies/Cartoons Kids
```
## 交互 Shell
- 启动交互式 shell
```bash
pikpakcli shell
```
- 在 shell 中切换目录并查看当前目录文件
```bash
pikpakcli shell
cd "/Movies/Kids Cartoons"
ls
```
- 在 shell 中打开远端文件到本地默认程序
```bash
pikpakcli shell
cd "/Movies"
open Peppa_Pig.mp4
```
================================================
FILE: docs/config.md
================================================
## Configuration
The CLI reads the following fields from `config.yml`:
```yml
proxy:
username: xxx
password: xxx
open:
download_dir:
default: []
text: []
image: []
video: []
audio: []
pdf: []
```
### Basic Fields
- `username`: your PikPak account username or phone number with country code such as `+861xxxxxxxxxx`.
- `password`: your PikPak account password.
- `proxy`: optional proxy URL such as `http://127.0.0.1:7890`.
> `proxy` must contain `://`.
### Open Settings
The `open` section is used by the interactive shell builtin `open`.
- `download_dir`: optional local cache directory for files that must be downloaded before opening.
- `default`: fallback local command used when no file-type-specific command is configured.
- `text`: local command used to open text and source files.
- `image`: local command used to open image files.
- `video`: local command used to open video files.
- `audio`: local command used to open audio files.
- `pdf`: local command used to open PDF files.
Each command field is a YAML string array. The first item is the executable name and the remaining items are its arguments.
If the command array contains `{path}`, it will be replaced with the local file path or remote media URL. If `{path}` is not present, the path or URL is appended to the end of the command automatically.
For video files, the shell `open` command prefers opening a remote media URL directly. Other file types are downloaded to the local cache directory before opening.
### Default Open Behavior
If the `open` section is not configured, the builtin `open` uses platform defaults:
- macOS: `text -> TextEdit`, `image/pdf -> Preview`, `video/audio -> IINA`, others -> `open`
- Linux: `xdg-open`
- Windows: `cmd /c start`
### Example
```yml
proxy: http://127.0.0.1:7890
username: +861xxxxxxxxxx
password: your-password
open:
download_dir: ~/Downloads/pikpak-open
default: ["open"]
text: ["zed"]
image: ["open", "-a", "Preview"]
video: ["open", "-a", "IINA"]
audio: ["open", "-a", "IINA"]
pdf: ["open", "-a", "Preview"]
```
================================================
FILE: docs/config_zhCN.md
================================================
## 配置说明
CLI 会从 `config.yml` 中读取以下字段:
```yml
proxy:
username: xxx
password: xxx
open:
download_dir:
default: []
text: []
image: []
video: []
audio: []
pdf: []
```
### 基础字段
- `username`:你的 PikPak 账号用户名,或者带区号的手机号,例如 `+861xxxxxxxxxx`。
- `password`:你的 PikPak 账号密码。
- `proxy`:可选代理地址,例如 `http://127.0.0.1:7890`。
> `proxy` 必须包含 `://`。
### Open 配置
`open` 配置段用于交互式 shell 中的内置 `open` 命令。
- `download_dir`:可选的本地缓存目录,用于存放打开前需要先下载的文件。
- `default`:当没有匹配到具体文件类型配置时使用的兜底本地命令。
- `text`:用于打开文本文件和源码文件的本地命令。
- `image`:用于打开图片文件的本地命令。
- `video`:用于打开视频文件的本地命令。
- `audio`:用于打开音频文件的本地命令。
- `pdf`:用于打开 PDF 文件的本地命令。
每个命令字段都使用 YAML 字符串数组。第一个元素是可执行程序名,后面的元素是它的参数。
如果命令数组中包含 `{path}`,运行时会将它替换为本地文件路径或远端媒体 URL。如果没有写 `{path}`,程序会自动把路径或 URL 追加到命令末尾。
对于视频文件,shell 中的 `open` 命令会优先直接打开远端媒体 URL。其他文件类型会先下载到本地缓存目录,再调用本地程序打开。
### 默认打开行为
如果没有配置 `open`,内置 `open` 会使用各平台默认行为:
- macOS:`text -> TextEdit`,`image/pdf -> Preview`,`video/audio -> IINA`,其他类型 -> `open`
- Linux:`xdg-open`
- Windows:`cmd /c start`
### 示例
```yml
proxy: http://127.0.0.1:7890
username: +861xxxxxxxxxx
password: your-password
open:
download_dir: ~/Downloads/pikpak-open
default: ["open"]
text: ["zed"]
image: ["open", "-a", "Preview"]
video: ["open", "-a", "IINA"]
audio: ["open", "-a", "IINA"]
pdf: ["open", "-a", "Preview"]
```
================================================
FILE: go.mod
================================================
module github.com/52funny/pikpakcli
go 1.21.3
require (
github.com/52funny/pikpakhash v0.0.0-20231104025731-ef91a56eff9c
github.com/chzyer/readline v1.5.1
github.com/fatih/color v1.15.0
github.com/json-iterator/go v1.1.12
github.com/spf13/cobra v1.6.1
github.com/spf13/pflag v1.0.5
github.com/stretchr/testify v1.8.4
github.com/tidwall/gjson v1.14.4
github.com/vbauerster/mpb/v8 v8.7.2
gopkg.in/yaml.v2 v2.4.0
)
require (
github.com/VividCortex/ewma v1.2.0 // indirect
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/inconshreveable/mousetrap v1.0.1 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.17 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/rivo/uniseg v0.4.4 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.0 // indirect
golang.org/x/sys v0.16.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
================================================
FILE: go.sum
================================================
github.com/52funny/pikpakhash v0.0.0-20231104025731-ef91a56eff9c h1:ecJG8tmvgH6exVE4+I3rFPPA1Mk3/lNb8VZ6A7dtcyI=
github.com/52funny/pikpakhash v0.0.0-20231104025731-ef91a56eff9c/go.mod h1:YA/IS8XUrMTcrY+J4yOJ3CDgoyQ28NOOo4GnzOL6bTI=
github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1ow=
github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
github.com/chzyer/logex v1.2.1 h1:XHDu3E6q+gdHgsdTPH6ImJMIp436vR6MPtH8gP05QzM=
github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ=
github.com/chzyer/readline v1.5.1 h1:upd/6fQk4src78LMRzh5vItIt361/o4uq553V8B5sGI=
github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk=
github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04=
github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs=
github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng=
github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis=
github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/tidwall/gjson v1.14.4 h1:uo0p8EbA09J7RQaflQ1aBRffTR7xedD2bcIVSYxLnkM=
github.com/tidwall/gjson v1.14.4/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/vbauerster/mpb/v8 v8.7.2 h1:SMJtxhNho1MV3OuFgS1DAzhANN1Ejc5Ct+0iSaIkB14=
github.com/vbauerster/mpb/v8 v8.7.2/go.mod h1:ZFnrjzspgDHoxYLGvxIruiNk73GNTPG4YHgVNpR10VY=
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
================================================
FILE: internal/api/captcha_token.go
================================================
package api
import (
"bytes"
"crypto/md5"
"fmt"
"time"
"github.com/52funny/pikpakcli/internal/logx"
jsoniter "github.com/json-iterator/go"
)
// Client identity constants baked into the captcha_sign calculation; they
// must match what the official PikPak Android client reports.
const package_name = `com.pikcloud.pikpak`
const client_version = `1.21.0`

// md5_obj is the JSON-encoded table of salt rounds used to derive
// captcha_sign; AuthCaptchaToken applies each entry in order.
const md5_obj = `[{"alg":"md5","salt":""},{"alg":"md5","salt":"E32cSkYXC2bciKJGxRsE8ZgwmH\/YwkvpD6\/O9guSOa2irCwciH4xPHaH"},{"alg":"md5","salt":"QtqgfMgHP2TFl"},{"alg":"md5","salt":"zOKgHT56L7nIzFzDpUGhpWFrgP53m3G6ML"},{"alg":"md5","salt":"S"},{"alg":"md5","salt":"THxpsktzfFXizUv7DK1y\/N7NZ1WhayViluBEvAJJ8bA1Wr6"},{"alg":"md5","salt":"y9PXH3xGUhG\/zQI8CaapRw2LhldCaFM9CRlKpZXJvj+pifu"},{"alg":"md5","salt":"+RaaG7T8FRTI4cP019N5y9ofLyHE9ySFUr"},{"alg":"md5","salt":"6Pf1l8UTeuzYldGtb\/d"}]`

// md5Obj is one salt round of the captcha_sign derivation.
type md5Obj struct {
	Alg  string `json:"alg"`
	Salt string `json:"salt"`
}

// md5Arr holds the parsed salt table; populated once in init.
var md5Arr []md5Obj
// init parses the embedded salt table. A failure is only logged (not
// fatal): the CLI can still start, although captcha signing would then
// operate on an empty salt list.
func init() {
	err := jsoniter.Unmarshal([]byte(md5_obj), &md5Arr)
	if err != nil {
		logx.Warn("api", err)
	}
}
// AuthCaptchaToken refreshes the captcha token required by the drive API
// for the given action (e.g. "GET:/drive/v1/files").
//
// It derives captcha_sign by iterating salted MD5 over the concatenated
// client metadata, posts the request to the shield/captcha/init endpoint,
// and on success stores the returned token in p.CaptchaToken.
func (p *PikPak) AuthCaptchaToken(action string) error {
	m := make(map[string]interface{})
	m["action"] = action
	m["captcha_token"] = p.CaptchaToken
	m["client_id"] = clientID
	m["device_id"] = p.DeviceId
	ts := fmt.Sprintf("%d", time.Now().UnixMilli())
	// captcha_sign: chain MD5 over the client info with each salt in order.
	str := clientID + client_version + package_name + p.DeviceId + ts
	for _, round := range md5Arr {
		if round.Alg == "md5" {
			str = fmt.Sprintf("%x", md5.Sum([]byte(str+round.Salt)))
		}
	}
	m["meta"] = map[string]string{
		"captcha_sign":   "1." + str,
		"user_id":        p.Sub,
		"package_name":   package_name,
		"client_version": client_version,
		"timestamp":      ts,
	}
	// Fix: the scheme was previously written as "ttps://" (missing "h").
	m["redirect_uri"] = "https://api.mypikpak.com/v1/auth/callback"
	bs, err := jsoniter.Marshal(m)
	if err != nil {
		return err
	}
	req, err := p.newRequest("POST", "https://user.mypikpak.com/v1/shield/captcha/init?client_id="+clientID, bytes.NewBuffer(bs))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json; charset=utf-8")
	bs, err = p.sendRequest(req)
	if err != nil {
		return err
	}
	errorCode := jsoniter.Get(bs, "error_code").ToInt()
	if errorCode != 0 {
		return fmt.Errorf("auth captcha token error: %s", jsoniter.Get(bs, "error").ToString())
	}
	p.CaptchaToken = jsoniter.Get(bs, "captcha_token").ToString()
	return nil
}
================================================
FILE: internal/api/constants.go
================================================
package api
// Values of the "kind" field the PikPak drive API uses to distinguish
// folders from regular files.
const (
	FileKindFolder = "drive#folder"
	FileKindFile   = "drive#file"
)
================================================
FILE: internal/api/download.go
================================================
package api
import (
"context"
"errors"
"fmt"
"io"
"net"
"net/http"
"os"
"strconv"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/vbauerster/mpb/v8"
)
// maxDownloadRetries bounds how many attempts Download makes before
// giving up on an interrupted transfer.
const maxDownloadRetries = 3

// errRestartDownload signals that the local partial file was discarded
// and the transfer must start again from offset zero.
var errRestartDownload = errors.New("restart download from beginning")

// retryableDownloadError marks a wrapped error as transient so the retry
// loop in Download will try the transfer again.
type retryableDownloadError struct {
	err error
}
// requestContext returns the context attached to the file, falling back
// to context.Background when the receiver is nil or no context was set.
func (f *File) requestContext() context.Context {
	if f == nil || f.ctx == nil {
		return context.Background()
	}
	return f.ctx
}
// Error returns the message of the wrapped error.
func (e *retryableDownloadError) Error() string {
	return e.err.Error()
}

// Unwrap exposes the wrapped error to errors.Is / errors.As.
func (e *retryableDownloadError) Unwrap() error {
	return e.err
}
// retryableDownload wraps err so Download's retry loop recognizes it as
// transient; a nil error passes through unchanged.
func retryableDownload(err error) error {
	if err != nil {
		return &retryableDownloadError{err: err}
	}
	return nil
}
// isRetryableDownloadError reports whether err (or anything it wraps)
// was marked transient by retryableDownload.
func isRetryableDownloadError(err error) bool {
	var rde *retryableDownloadError
	return errors.As(err, &rde)
}
// Download fetches the remote file into path, retrying interrupted
// transfers up to maxDownloadRetries times. Non-transient errors abort
// immediately.
func (f *File) Download(path string, bar *mpb.Bar) error {
	size, parseErr := strconv.ParseInt(f.Size, 10, 64)
	if parseErr != nil {
		// Unknown remote size: disable the completeness check.
		size = -1
	}
	var err error
	for try := 0; try < maxDownloadRetries; try++ {
		if err = f.download(path, bar, size); err == nil {
			return nil
		}
		if !isRetryableDownloadError(err) || try == maxDownloadRetries-1 {
			return err
		}
		logx.Warnf("transfer", "Download %s interrupted, retrying (%d/%d): %v", f.Name, try+1, maxDownloadRetries-1, err)
	}
	return err
}
// download performs one transfer attempt of the remote file into path.
//
// If path already contains a partial download, the transfer resumes from
// its current size via an HTTP Range request; expectedSize < 0 means the
// remote size is unknown and completeness is not verified. Errors worth
// retrying are wrapped with retryableDownload so Download can tell them
// apart from fatal ones.
func (f *File) download(path string, bar *mpb.Bar, expectedSize int64) error {
	outFile, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		return err
	}
	defer outFile.Close()
	info, err := outFile.Stat()
	if err != nil {
		return err
	}
	offset := info.Size()
	// A local file larger than the remote one cannot be a valid partial
	// download; discard it and start over.
	if expectedSize >= 0 && offset > expectedSize {
		if err := outFile.Truncate(0); err != nil {
			return err
		}
		offset = 0
	}
	if _, err := outFile.Seek(offset, io.SeekStart); err != nil {
		return err
	}
	req, err := http.NewRequestWithContext(f.requestContext(), "GET", f.Links.ApplicationOctetStream.URL, nil)
	if err != nil {
		return err
	}
	req.Header.Set("User-Agent", userAgent)
	if offset > 0 {
		req.Header.Set("Range", fmt.Sprintf("bytes=%d-", offset))
		if bar != nil {
			bar.SetCurrent(offset)
		}
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return retryableDownload(err)
	}
	defer resp.Body.Close()
	switch {
	case offset > 0 && resp.StatusCode == http.StatusRequestedRangeNotSatisfiable:
		// 416 with a byte-complete local file means nothing is left to
		// fetch; otherwise the local copy is unusable and must be redone.
		if expectedSize >= 0 && offset == expectedSize {
			if bar != nil {
				bar.SetCurrent(expectedSize)
			}
			return nil
		}
		if err := outFile.Truncate(0); err != nil {
			return err
		}
		if bar != nil {
			bar.SetCurrent(0)
		}
		return retryableDownload(errRestartDownload)
	case offset > 0 && resp.StatusCode == http.StatusOK:
		// The server ignored the Range header and is sending the whole
		// file; drop the partial copy and restart cleanly.
		logx.Warnf("transfer", "Resume file %s failed: server ignored range request, restarting from the beginning", f.Name)
		if err := outFile.Truncate(0); err != nil {
			return err
		}
		if bar != nil {
			bar.SetCurrent(0)
		}
		return retryableDownload(errRestartDownload)
	case offset > 0 && resp.StatusCode != http.StatusPartialContent:
		// Server errors and throttling may clear up; client errors won't.
		if resp.StatusCode >= http.StatusInternalServerError || resp.StatusCode == http.StatusTooManyRequests {
			return retryableDownload(fmt.Errorf("download request failed: %s", resp.Status))
		}
		return fmt.Errorf("download request failed: %s", resp.Status)
	case offset == 0 && (resp.StatusCode < http.StatusOK || resp.StatusCode >= http.StatusMultipleChoices):
		if resp.StatusCode >= http.StatusInternalServerError || resp.StatusCode == http.StatusTooManyRequests {
			return retryableDownload(fmt.Errorf("download request failed: %s", resp.Status))
		}
		return fmt.Errorf("download request failed: %s", resp.Status)
	}
	var reader io.ReadCloser
	if bar != nil {
		reader = bar.ProxyReader(resp.Body)
	} else {
		reader = resp.Body
	}
	defer reader.Close()
	buf := make([]byte, 1024*128)
	written, err := io.CopyBuffer(outFile, reader, buf)
	if err != nil {
		// Only transport-level interruptions are worth retrying. Other
		// copy failures (full disk, canceled context, ...) are returned
		// unwrapped so Download does not retry them pointlessly.
		// (Previously both branches wrapped the error identically, making
		// this classification dead code.)
		var netErr net.Error
		if errors.Is(err, io.ErrUnexpectedEOF) || errors.As(err, &netErr) {
			return retryableDownload(err)
		}
		return err
	}
	contentLengthHeader := resp.Header.Get("Content-Length")
	if contentLengthHeader != "" {
		contentLength, err := strconv.ParseInt(contentLengthHeader, 10, 64)
		if err != nil {
			return fmt.Errorf("parse content length failed: %w", err)
		}
		if contentLength != written {
			return retryableDownload(fmt.Errorf("content length not equal to written"))
		}
	}
	if expectedSize >= 0 && offset+written != expectedSize {
		return retryableDownload(fmt.Errorf("download incomplete: got %d of %d bytes", offset+written, expectedSize))
	}
	return nil
}
================================================
FILE: internal/api/download_test.go
================================================
package api
import (
"bufio"
"fmt"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"strconv"
"sync/atomic"
"testing"
"github.com/stretchr/testify/require"
)
// TestDownloadResumesAfterInterruptedTransfer verifies that when the
// first response is cut off mid-body, Download retries with a Range
// header and stitches the remaining bytes onto the partial local file.
func TestDownloadResumesAfterInterruptedTransfer(t *testing.T) {
	content := []byte("hello world")
	var requests atomic.Int32
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch requests.Add(1) {
		case 1:
			// First request: hijack the connection so we can advertise the
			// full Content-Length but deliver only the first 5 bytes.
			require.Empty(t, r.Header.Get("Range"))
			hj, ok := w.(http.Hijacker)
			require.True(t, ok)
			conn, rw, err := hj.Hijack()
			require.NoError(t, err)
			defer conn.Close()
			_, err = fmt.Fprintf(rw, "HTTP/1.1 200 OK\r\nContent-Length: %d\r\n\r\n", len(content))
			require.NoError(t, err)
			_, err = rw.Write(content[:5])
			require.NoError(t, err)
			require.NoError(t, rw.Flush())
		case 2:
			// Second request: the client must resume from byte 5.
			require.Equal(t, "bytes=5-", r.Header.Get("Range"))
			remaining := content[5:]
			w.Header().Set("Content-Length", strconv.Itoa(len(remaining)))
			w.Header().Set("Content-Range", fmt.Sprintf("bytes 5-%d/%d", len(content)-1, len(content)))
			w.WriteHeader(http.StatusPartialContent)
			_, err := w.Write(remaining)
			require.NoError(t, err)
		default:
			t.Fatalf("unexpected request count: %d", requests.Load())
		}
	}))
	defer server.Close()
	file := File{
		FileStat: FileStat{
			Name: "resume.bin",
			Size: strconv.Itoa(len(content)),
		},
	}
	file.Links.ApplicationOctetStream.URL = server.URL
	target := filepath.Join(t.TempDir(), file.Name)
	require.NoError(t, file.Download(target, nil))
	downloaded, err := os.ReadFile(target)
	require.NoError(t, err)
	require.Equal(t, content, downloaded)
	require.EqualValues(t, 2, requests.Load())
}
// TestDownloadRestartsWhenServerIgnoresRangeRequest verifies that a 200
// response to a Range request makes Download drop the partial file and
// re-fetch the whole content without a Range header.
func TestDownloadRestartsWhenServerIgnoresRangeRequest(t *testing.T) {
	content := []byte("hello world")
	var requests atomic.Int32
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch requests.Add(1) {
		case 1:
			// The pre-seeded 5-byte partial file triggers a resume attempt.
			require.Equal(t, "bytes=5-", r.Header.Get("Range"))
		case 2:
			// After the ignored range, the retry must start from scratch.
			require.Empty(t, r.Header.Get("Range"))
		default:
			t.Fatalf("unexpected request count: %d", requests.Load())
		}
		// Always answer 200 with the full body, ignoring any Range header.
		w.Header().Set("Content-Length", strconv.Itoa(len(content)))
		w.WriteHeader(http.StatusOK)
		_, err := w.Write(content)
		require.NoError(t, err)
	}))
	defer server.Close()
	file := File{
		FileStat: FileStat{
			Name: "restart.bin",
			Size: strconv.Itoa(len(content)),
		},
	}
	file.Links.ApplicationOctetStream.URL = server.URL
	target := filepath.Join(t.TempDir(), file.Name)
	require.NoError(t, os.WriteFile(target, content[:5], 0644))
	require.NoError(t, file.Download(target, nil))
	downloaded, err := os.ReadFile(target)
	require.NoError(t, err)
	require.Equal(t, content, downloaded)
	require.EqualValues(t, 2, requests.Load())
}
// TestDownloadTreatsSatisfiedRangeAsSuccess verifies that a 416 response
// for an already-complete local file is treated as success and the file
// is left untouched.
func TestDownloadTreatsSatisfiedRangeAsSuccess(t *testing.T) {
	content := []byte("hello world")
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// The local file already holds all 11 bytes, so the client asks to
		// resume exactly at the end of the content.
		require.Equal(t, "bytes=11-", r.Header.Get("Range"))
		w.WriteHeader(http.StatusRequestedRangeNotSatisfiable)
	}))
	defer server.Close()
	file := File{
		FileStat: FileStat{
			Name: "complete.bin",
			Size: strconv.Itoa(len(content)),
		},
	}
	file.Links.ApplicationOctetStream.URL = server.URL
	target := filepath.Join(t.TempDir(), file.Name)
	require.NoError(t, os.WriteFile(target, content, 0644))
	require.NoError(t, file.Download(target, nil))
	f, err := os.Open(target)
	require.NoError(t, err)
	defer f.Close()
	reader := bufio.NewReader(f)
	got, err := reader.Peek(len(content))
	require.NoError(t, err)
	require.Equal(t, content, got)
}
================================================
FILE: internal/api/file.go
================================================
package api
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net"
"net/url"
"strings"
"time"
"unsafe"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/52funny/pikpakcli/internal/utils"
"github.com/tidwall/gjson"
)
// FileStat is the lightweight per-entry metadata returned by the PikPak
// drive listing endpoints.
type FileStat struct {
	Kind          string    `json:"kind"` // FileKindFolder or FileKindFile
	ID            string    `json:"id"`
	ParentID      string    `json:"parent_id"`
	Name          string    `json:"name"`
	UserID        string    `json:"user_id"`
	Size          string    `json:"size"` // byte count, serialized as a decimal string
	FileExtension string    `json:"file_extension"`
	MimeType      string    `json:"mime_type"`
	CreatedTime   time.Time `json:"created_time"`
	ModifiedTime  time.Time `json:"modified_time"`
	IconLink      string    `json:"icon_link"`
	ThumbnailLink string    `json:"thumbnail_link"`
	Md5Checksum   string    `json:"md5_checksum"`
	Hash          string    `json:"hash"`
	Phase         string    `json:"phase"`
}
// File is the full metadata of a single drive entry as returned by the
// per-file endpoint, including download links and media variants.
type File struct {
	FileStat
	Revision       string `json:"revision"`
	Starred        bool   `json:"starred"`
	WebContentLink string `json:"web_content_link"`
	// Links carries the raw download URL used by File.Download.
	Links struct {
		ApplicationOctetStream struct {
			URL    string    `json:"url"`
			Token  string    `json:"token"`
			Expire time.Time `json:"expire"`
		} `json:"application/octet-stream"`
	} `json:"links"`
	Audit struct {
		Status  string `json:"status"`
		Message string `json:"message"`
		Title   string `json:"title"`
	} `json:"audit"`
	// Medias lists transcoded/streamable variants of the file.
	Medias []struct {
		MediaID   string      `json:"media_id"`
		MediaName string      `json:"media_name"`
		Video     interface{} `json:"video"`
		Link      struct {
			URL    string    `json:"url"`
			Token  string    `json:"token"`
			Expire time.Time `json:"expire"`
		} `json:"link"`
		NeedMoreQuota  bool          `json:"need_more_quota"`
		VipTypes       []interface{} `json:"vip_types"`
		RedirectLink   string        `json:"redirect_link"`
		IconLink       string        `json:"icon_link"`
		IsDefault      bool          `json:"is_default"`
		Priority       int           `json:"priority"`
		IsOrigin       bool          `json:"is_origin"`
		ResolutionName string        `json:"resolution_name"`
		IsVisible      bool          `json:"is_visible"`
		Category       string        `json:"category"`
	} `json:"medias"`
	Trashed     bool   `json:"trashed"`
	DeleteTime  string `json:"delete_time"`
	OriginalURL string `json:"original_url"`
	Params      struct {
		Platform     string `json:"platform"`
		PlatformIcon string `json:"platform_icon"`
	} `json:"params"`
	OriginalFileIndex int           `json:"original_file_index"`
	Space             string        `json:"space"`
	Apps              []interface{} `json:"apps"`
	Writable          bool          `json:"writable"`
	FolderType        string        `json:"folder_type"`
	Collection        interface{}   `json:"collection"`
	// ctx is the request context attached by GetFile; see requestContext.
	ctx context.Context
}
// fileListResult mirrors one page of the drive "files" listing response.
type fileListResult struct {
	NextPageToken string     `json:"next_page_token"` // empty on the last page
	Files         []FileStat `json:"files"`
}

// maxListRetries bounds retries of a single listing request on transient
// network errors.
const maxListRetries = 3
// GetFolderFileStatList lists every non-trashed entry directly under
// parentId (the empty id addresses the drive root), following pagination
// until all pages have been fetched.
func (p *PikPak) GetFolderFileStatList(parentId string) ([]FileStat, error) {
	filters := `{"trashed":{"eq":false}}`
	query := url.Values{}
	query.Add("thumbnail_size", "SIZE_MEDIUM")
	query.Add("limit", "500")
	query.Add("parent_id", parentId)
	query.Add("with_audit", "false")
	query.Add("filters", filters)
	fileList := make([]FileStat, 0)
	for {
		bs, err := p.getFolderFileStatPage(query)
		if err != nil {
			return fileList, err
		}
		// gjson.GetBytes replaces the previous unsafe []byte->string cast
		// and matches how the rest of this file reads JSON responses.
		if gjson.GetBytes(bs, "error_code").Int() == 9 {
			// Captcha token expired: refresh it and re-fetch the same page
			// instead of decoding the error payload as an (empty) page,
			// which silently truncated the listing.
			if err := p.AuthCaptchaToken("GET:/drive/v1/files"); err != nil {
				return fileList, err
			}
			continue
		}
		var result fileListResult
		if err := json.Unmarshal(bs, &result); err != nil {
			return fileList, err
		}
		fileList = append(fileList, result.Files...)
		if result.NextPageToken == "" {
			return fileList, nil
		}
		query.Set("page_token", result.NextPageToken)
	}
}
// getFolderFileStatPage fetches one page of the folder listing described
// by query, retrying up to maxListRetries times on transient network
// errors with a linearly increasing backoff.
func (p *PikPak) getFolderFileStatPage(query url.Values) ([]byte, error) {
	var lastErr error
	for attempt := 0; attempt < maxListRetries; attempt++ {
		// A fresh request is built per attempt; the body is nil so the
		// request is trivially replayable.
		req, err := p.newRequest("GET", "https://api-drive.mypikpak.com/drive/v1/files?"+query.Encode(), nil)
		if err != nil {
			return nil, err
		}
		req.Header.Set("X-Captcha-Token", p.CaptchaToken)
		req.Header.Set("Content-Type", "application/json")
		bs, err := p.sendRequest(req)
		if err == nil {
			return bs, nil
		}
		lastErr = err
		if !isRetryableListError(err) || attempt == maxListRetries-1 {
			break
		}
		logx.Warnf("transfer", "List folder interrupted, retrying (%d/%d): %v", attempt+1, maxListRetries-1, err)
		time.Sleep(time.Duration(attempt+1) * 200 * time.Millisecond)
	}
	return nil, lastErr
}
func isRetryableListError(err error) bool {
if err == nil {
return false
}
if errors.Is(err, io.ErrUnexpectedEOF) {
return true
}
var netErr net.Error
if errors.As(err, &netErr) {
return true
}
message := strings.ToLower(err.Error())
return strings.Contains(message, "unexpected eof") ||
strings.Contains(message, "connection reset by peer") ||
strings.Contains(message, "connection closed") ||
strings.Contains(message, "broken pipe")
}
// GetFileStat looks up the entry named name directly under parentId and
// returns its FileStat; the match is exact (case-sensitive).
func (p *PikPak) GetFileStat(parentId string, name string) (FileStat, error) {
	stats, err := p.GetFolderFileStatList(parentId)
	if err != nil {
		return FileStat{}, err
	}
	for i := range stats {
		if stats[i].Name == name {
			return stats[i], nil
		}
	}
	return FileStat{}, errors.New("file not found")
}
// GetFileByPath resolves a remote path ("dir/sub/name") to the FileStat
// of its final component. The root directory itself cannot be described.
func (p *PikPak) GetFileByPath(path string) (FileStat, error) {
	parentPath, name := utils.SplitRemotePath(path)
	if name == "" {
		return FileStat{}, errors.New("cannot get info of root directory")
	}
	// The empty parent id addresses the drive root.
	parentID := ""
	var err error
	if parentPath != "" {
		parentID, err = p.GetPathFolderId(parentPath)
		if err != nil {
			return FileStat{}, err
		}
	}
	return p.GetFileStat(parentID, name)
}
// GetFile fetches the full metadata (including download links) of the
// file with id fileId and attaches the client's request context to it.
func (p *PikPak) GetFile(fileId string) (File, error) {
	var fileInfo File
	query := url.Values{}
	query.Add("thumbnail_size", "SIZE_MEDIUM")
	req, err := p.newRequest("GET", "https://api-drive.mypikpak.com/drive/v1/files/"+fileId+"?"+query.Encode(), nil)
	if err != nil {
		return fileInfo, err
	}
	req.Header.Set("X-Captcha-Token", p.CaptchaToken)
	req.Header.Set("X-Device-Id", p.DeviceId)
	bs, err := p.sendRequest(req)
	if err != nil {
		return fileInfo, err
	}
	// Zero-copy []byte -> string view for gjson; bs must not be mutated
	// while the view is alive.
	error_code := gjson.Get(*(*string)(unsafe.Pointer(&bs)), "error_code").Int()
	if error_code != 0 {
		// Code 9 means the captcha token expired; refresh it before
		// reporting the failure so the caller's next attempt can succeed.
		if error_code == 9 {
			err = p.AuthCaptchaToken("GET:/drive/v1/files")
			if err != nil {
				return fileInfo, err
			}
		}
		err = errors.New(gjson.Get(*(*string)(unsafe.Pointer(&bs)), "error").String() + ":" + fileId)
		return fileInfo, err
	}
	err = json.Unmarshal(bs, &fileInfo)
	if err != nil {
		return fileInfo, err
	}
	fileInfo.ctx = p.requestContext()
	return fileInfo, err
}
// DeleteFile permanently deletes the file with id fileId. If the captcha
// token has expired (error code 9) it is refreshed and the request is
// replayed.
func (p *PikPak) DeleteFile(fileId string) error {
START:
	req, err := p.newRequest("DELETE", "https://api-drive.mypikpak.com/drive/v1/files/"+fileId, nil)
	if err != nil {
		return err
	}
	req.Header.Set("X-Captcha-Token", p.CaptchaToken)
	req.Header.Set("X-Device-Id", p.DeviceId)
	bs, err := p.sendRequest(req)
	if err != nil {
		return err
	}
	error_code := gjson.GetBytes(bs, "error_code").Int()
	if error_code != 0 {
		if error_code == 9 {
			err = p.AuthCaptchaToken("DELETE:/drive/v1/files")
			if err != nil {
				return err
			}
			// Replay the delete with the refreshed captcha token.
			goto START
		}
		return fmt.Errorf("%s: %s", gjson.GetBytes(bs, "error").String(), fileId)
	}
	return nil
}
// Rename changes the display name of the file with id fileId via a PATCH
// request. On captcha expiry (error code 9) the token is refreshed and
// the request replayed.
func (p *PikPak) Rename(fileId string, newName string) error {
	if newName == "" {
		return errors.New("new name cannot be empty")
	}
	apiURL := "https://api-drive.mypikpak.com/drive/v1/files/" + fileId
	body := map[string]string{"name": newName}
	jsonBody, err := json.Marshal(body)
	if err != nil {
		return err
	}
START:
	req, err := p.newRequest("PATCH", apiURL, bytes.NewBuffer(jsonBody))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("X-Captcha-Token", p.CaptchaToken)
	req.Header.Set("X-Device-Id", p.DeviceId)
	bs, err := p.sendRequest(req)
	if err != nil {
		return err
	}
	errorCode := gjson.GetBytes(bs, "error_code").Int()
	if errorCode != 0 {
		if errorCode == 9 {
			err = p.AuthCaptchaToken("PATCH:/drive/v1/files")
			if err != nil {
				return err
			}
			// Replay the rename with the refreshed captcha token.
			goto START
		}
		return fmt.Errorf("%s: %s", gjson.GetBytes(bs, "error").String(), fileId)
	}
	return nil
}
================================================
FILE: internal/api/file_test.go
================================================
package api
import (
"context"
"errors"
"io"
"net"
"testing"
"github.com/stretchr/testify/assert"
)
// fakeNetError is a minimal net.Error stub used to exercise the retry
// classification logic in tests.
type fakeNetError struct{}

func (fakeNetError) Error() string   { return "i/o timeout" }
func (fakeNetError) Timeout() bool   { return true }
func (fakeNetError) Temporary() bool { return true }
// TestIsRetryableListError covers the three retry triggers (sentinel EOF,
// net.Error implementations, and message substrings) plus the negative
// cases (unrelated errors and nil).
func TestIsRetryableListError(t *testing.T) {
	assert.True(t, isRetryableListError(io.ErrUnexpectedEOF))
	assert.True(t, isRetryableListError(errors.New("unexpected EOF")))
	assert.True(t, isRetryableListError(fakeNetError{}))
	assert.True(t, isRetryableListError(errors.New("read: connection reset by peer")))
	assert.False(t, isRetryableListError(errors.New("permission denied")))
	assert.False(t, isRetryableListError(nil))
}
// TestFakeNetErrorImplementsNetError is a compile-time and behavioral
// sanity check that the stub really satisfies net.Error.
func TestFakeNetErrorImplementsNetError(t *testing.T) {
	var err net.Error = fakeNetError{}
	assert.True(t, err.Timeout())
	assert.True(t, err.Temporary())
}
// TestPikPakWithContext checks that WithContext returns a distinct client
// carrying the given context while leaving the original client's context
// untouched.
func TestPikPakWithContext(t *testing.T) {
	base := NewPikPak("user", "pass")
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	derived := base.WithContext(ctx)
	assert.NotNil(t, derived)
	assert.NotSame(t, &base, derived)
	assert.Equal(t, ctx, derived.requestContext())
	assert.NotEqual(t, ctx, base.requestContext())
}
================================================
FILE: internal/api/folder.go
================================================
package api
import (
"bytes"
"fmt"
"net/url"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/52funny/pikpakcli/internal/utils"
jsoniter "github.com/json-iterator/go"
"github.com/tidwall/gjson"
)
// GetDeepFolderId resolves the folder id of dirPath, walking one path
// segment at a time starting from parentId.
// dirPath may contain "/"; a leading "/" is stripped and resolution then
// proceeds from the given parent directory.
func (p *PikPak) GetDeepFolderId(parentId string, dirPath string) (string, error) {
	cleaned := utils.Slash(dirPath)
	if cleaned == "" {
		return parentId, nil
	}
	current := parentId
	for _, segment := range utils.SplitSeparator(cleaned) {
		next, err := p.GetFolderId(current, segment)
		if err != nil {
			return "", err
		}
		current = next
	}
	return current, nil
}
// GetPathFolderId resolves dirPath starting from the drive root (the
// empty parent id).
func (p *PikPak) GetPathFolderId(dirPath string) (string, error) {
	return p.GetDeepFolderId("", dirPath)
}
// GetFolderId returns the id of the direct child folder named dir under
// parentId; dir must not contain "/".
// It pages through the listing (500 entries per page) until a non-trashed
// folder with an exactly matching name is found, or returns
// ErrNotFoundFolder when the listing is exhausted.
func (p *PikPak) GetFolderId(parentId string, dir string) (string, error) {
	// slash the dir path
	dir = utils.Slash(dir)
	value := url.Values{}
	value.Add("parent_id", parentId)
	value.Add("page_token", "")
	value.Add("with_audit", "false")
	value.Add("thumbnail_size", "SIZE_LARGE")
	value.Add("limit", "500")
	for {
		// Plain concatenation: the previous fmt.Sprintf call used the URL
		// as a format string, so "%XX" percent-escapes emitted by
		// value.Encode() were mangled as formatting verbs.
		req, err := p.newRequest("GET", "https://api-drive.mypikpak.com/drive/v1/files?"+value.Encode(), nil)
		if err != nil {
			return "", err
		}
		req.Header.Set("Country", "CN")
		req.Header.Set("X-Peer-Id", p.DeviceId)
		req.Header.Set("X-User-Region", "1")
		req.Header.Set("X-Alt-Capability", "3")
		req.Header.Set("X-Client-Version-Code", "10083")
		req.Header.Set("X-Captcha-Token", p.CaptchaToken)
		bs, err := p.sendRequest(req)
		if err != nil {
			return "", err
		}
		files := gjson.GetBytes(bs, "files").Array()
		for _, file := range files {
			kind := file.Get("kind").String()
			name := file.Get("name").String()
			trashed := file.Get("trashed").Bool()
			if kind == FileKindFolder && name == dir && !trashed {
				return file.Get("id").String(), nil
			}
		}
		nextToken := gjson.GetBytes(bs, "next_page_token").String()
		if nextToken == "" {
			break
		}
		value.Set("page_token", nextToken)
	}
	return "", ErrNotFoundFolder
}
// GetDeepFolderOrCreateId resolves dirPath like GetDeepFolderId, but any
// path segment that does not exist yet is created on the fly.
func (p *PikPak) GetDeepFolderOrCreateId(parentId string, dirPath string) (string, error) {
	dirPath = utils.Slash(dirPath)
	if dirPath == "" || dirPath == "." {
		return parentId, nil
	}
	dirS := utils.SplitSeparator(dirPath)
	for _, dir := range dirS {
		id, err := p.GetFolderId(parentId, dir)
		if err != nil {
			logx.Warn("api", "dir ", err)
			if err == ErrNotFoundFolder {
				// Missing segment: create it and descend into the new folder.
				createId, err := p.CreateFolder(parentId, dir)
				if err != nil {
					return "", err
				} else {
					logx.Debug("api", "create dir: ", dir)
					parentId = createId
				}
			} else {
				return "", err
			}
		} else {
			parentId = id
		}
	}
	return parentId, nil
}
// CreateFolder creates a new folder named dir inside the folder with id
// parentId and returns the id of the created folder. On captcha expiry
// (error code 9) the token is refreshed and the request replayed.
func (p *PikPak) CreateFolder(parentId, dir string) (string, error) {
	m := map[string]interface{}{
		"kind":      FileKindFolder,
		"parent_id": parentId,
		"name":      dir,
	}
	bs, err := jsoniter.Marshal(&m)
	if err != nil {
		return "", err
	}
START:
	req, err := p.newRequest("POST", "https://api-drive.mypikpak.com/drive/v1/files", bytes.NewBuffer(bs))
	if err != nil {
		return "", err
	}
	req.Header.Set("Content-Type", "application/json; charset=utf-8")
	req.Header.Set("Product_flavor_name", "cha")
	req.Header.Set("X-Captcha-Token", p.CaptchaToken)
	req.Header.Set("X-Client-Version-Code", "10083")
	req.Header.Set("X-Peer-Id", p.DeviceId)
	req.Header.Set("X-User-Region", "1")
	req.Header.Set("X-Alt-Capability", "3")
	req.Header.Set("Country", "CN")
	bs, err = p.sendRequest(req)
	if err != nil {
		return "", err
	}
	error_code := gjson.GetBytes(bs, "error_code").Int()
	if error_code != 0 {
		if error_code == 9 {
			err := p.AuthCaptchaToken("POST:/drive/v1/files")
			if err != nil {
				return "", err
			}
			// Replay the creation with the refreshed captcha token.
			goto START
		}
		return "", fmt.Errorf("create folder error: %s", jsoniter.Get(bs, "error").ToString())
	}
	id := gjson.GetBytes(bs, "file.id").String()
	return id, nil
}
================================================
FILE: internal/api/glob.go
================================================
package api
import (
"fmt"
"path"
"strings"
)
// remotePatternProvider is the subset of the PikPak client used by the
// glob helpers, abstracted so tests can substitute a fake implementation.
type remotePatternProvider interface {
	GetPathFolderId(dirPath string) (string, error)
	GetFolderFileStatList(parentId string) ([]FileStat, error)
}
// ExpandRemotePatterns expands each pattern relative to basePath and
// returns the concatenated matches in input order. The first failing
// pattern aborts the expansion.
func ExpandRemotePatterns(p remotePatternProvider, basePath string, patterns []string, keepRelative bool) ([]string, error) {
	result := make([]string, 0, len(patterns))
	for _, pat := range patterns {
		found, err := expandRemotePattern(p, basePath, pat, keepRelative)
		if err != nil {
			return nil, err
		}
		result = append(result, found...)
	}
	return result, nil
}
// expandRemotePattern expands a single glob pattern against the remote
// drive. Patterns without wildcard characters are returned verbatim.
// Relative patterns are resolved against basePath; when keepRelative is
// true, matches for relative patterns are reported relative to basePath.
// A wildcard pattern that matches nothing is an error.
func expandRemotePattern(p remotePatternProvider, basePath string, pattern string, keepRelative bool) ([]string, error) {
	if !hasRemoteWildcard(pattern) {
		return []string{pattern}, nil
	}
	resolvedPattern := path.Clean(pattern)
	if !path.IsAbs(resolvedPattern) {
		resolvedPattern = path.Clean(path.Join("/", basePath, pattern))
	}
	parentPath := path.Dir(resolvedPattern)
	if parentPath == "." {
		parentPath = "/"
	}
	// The empty id addresses the drive root.
	parentID := ""
	var err error
	if parentPath != "/" {
		parentID, err = p.GetPathFolderId(parentPath)
		if err != nil {
			return nil, err
		}
	}
	files, err := p.GetFolderFileStatList(parentID)
	if err != nil {
		return nil, err
	}
	matches := make([]string, 0)
	// Only the final path segment may carry the wildcard.
	namePattern := path.Base(resolvedPattern)
	for _, file := range files {
		matched, err := path.Match(namePattern, file.Name)
		if err != nil {
			return nil, fmt.Errorf("invalid wildcard pattern %s: %w", pattern, err)
		}
		if !matched {
			continue
		}
		matchPath := path.Join(parentPath, file.Name)
		if keepRelative && !path.IsAbs(pattern) {
			matches = append(matches, relativeRemotePath(basePath, matchPath))
			continue
		}
		matches = append(matches, matchPath)
	}
	if len(matches) == 0 {
		return nil, fmt.Errorf("no matches found for %s", pattern)
	}
	return matches, nil
}
// hasRemoteWildcard reports whether value contains any glob
// metacharacter understood by path.Match (*, ? or [).
func hasRemoteWildcard(value string) bool {
	for _, meta := range "*?[" {
		if strings.ContainsRune(value, meta) {
			return true
		}
	}
	return false
}
// relativeRemotePath rewrites fullPath relative to basePath when
// fullPath lies underneath it; otherwise the cleaned fullPath is
// returned unchanged. A root-like base ("", "." or "/") only strips the
// leading slash.
func relativeRemotePath(basePath string, fullPath string) string {
	base := path.Clean(basePath)
	full := path.Clean(fullPath)
	switch base {
	case ".", "", "/":
		return strings.TrimPrefix(full, "/")
	}
	if rel, ok := strings.CutPrefix(full, base+"/"); ok {
		return rel
	}
	return full
}
================================================
FILE: internal/api/glob_test.go
================================================
package api
import (
"errors"
"testing"
"github.com/stretchr/testify/require"
)
// fakeRemotePatternProvider stubs the two remote lookups used by glob
// expansion with injectable callbacks.
type fakeRemotePatternProvider struct {
	getPathFolderID       func(dirPath string) (string, error)
	getFolderFileStatList func(parentId string) ([]FileStat, error)
}

func (f fakeRemotePatternProvider) GetPathFolderId(dirPath string) (string, error) {
	return f.getPathFolderID(dirPath)
}

func (f fakeRemotePatternProvider) GetFolderFileStatList(parentId string) ([]FileStat, error) {
	return f.getFolderFileStatList(parentId)
}

// TestExpandRemotePatternsReturnsAbsoluteMatches checks that a relative
// wildcard resolved against basePath yields absolute matched paths and
// skips non-matching entries.
func TestExpandRemotePatternsReturnsAbsoluteMatches(t *testing.T) {
	provider := fakeRemotePatternProvider{
		getPathFolderID: func(dirPath string) (string, error) {
			require.Equal(t, "/Movies", dirPath)
			return "movies-id", nil
		},
		getFolderFileStatList: func(parentId string) ([]FileStat, error) {
			require.Equal(t, "movies-id", parentId)
			return []FileStat{
				{Name: "a.mp4"},
				{Name: "b.mp4"},
				{Name: "note.txt"},
			}, nil
		},
	}
	matches, err := ExpandRemotePatterns(provider, "/Movies", []string{"*.mp4"}, false)
	require.NoError(t, err)
	require.Equal(t, []string{"/Movies/a.mp4", "/Movies/b.mp4"}, matches)
}

// TestExpandRemotePatternsCanKeepRelativeMatches checks that a relative
// pattern with keepRelative=true produces base-relative results.
func TestExpandRemotePatternsCanKeepRelativeMatches(t *testing.T) {
	provider := fakeRemotePatternProvider{
		getPathFolderID: func(dirPath string) (string, error) {
			require.Equal(t, "/Movies/Kids", dirPath)
			return "kids-id", nil
		},
		getFolderFileStatList: func(parentId string) ([]FileStat, error) {
			require.Equal(t, "kids-id", parentId)
			return []FileStat{
				{Name: "a.srt"},
				{Name: "b.srt"},
			}, nil
		},
	}
	matches, err := ExpandRemotePatterns(provider, "/Movies", []string{"Kids/*.srt"}, true)
	require.NoError(t, err)
	require.Equal(t, []string{"Kids/a.srt", "Kids/b.srt"}, matches)
}

// TestExpandRemotePatternsReturnsNoMatchError checks that a wildcard
// with zero matches is reported as an error rather than an empty list.
func TestExpandRemotePatternsReturnsNoMatchError(t *testing.T) {
	provider := fakeRemotePatternProvider{
		getPathFolderID: func(dirPath string) (string, error) {
			return "movies-id", nil
		},
		getFolderFileStatList: func(parentId string) ([]FileStat, error) {
			return []FileStat{{Name: "note.txt"}}, nil
		},
	}
	_, err := ExpandRemotePatterns(provider, "/Movies", []string{"*.mp4"}, false)
	require.EqualError(t, err, "no matches found for *.mp4")
}

// TestExpandRemotePatternsPropagatesLookupErrors checks that a folder-id
// lookup failure aborts expansion before any listing happens.
func TestExpandRemotePatternsPropagatesLookupErrors(t *testing.T) {
	provider := fakeRemotePatternProvider{
		getPathFolderID: func(dirPath string) (string, error) {
			return "", errors.New("lookup failed")
		},
		getFolderFileStatList: func(parentId string) ([]FileStat, error) {
			return nil, errors.New("should not list")
		},
	}
	_, err := ExpandRemotePatterns(provider, "/Movies", []string{"Kids/*.mp4"}, false)
	require.EqualError(t, err, "lookup failed")
}
================================================
FILE: internal/api/pikpak.go
================================================
package api
import (
"bytes"
"context"
"crypto/md5"
"encoding/hex"
"fmt"
"io"
"net/http"
"net/url"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/logx"
jsoniter "github.com/json-iterator/go"
)
// Client identity constants sent with every PikPak API call.
const userAgent = `ANDROID-com.pikcloud.pikpak/1.21.0`
const clientID = `YNxT9w7GMdWvEOKa`
const clientSecret = `dbw2OtmVEeuUvIptb1Coyg`

// PikPak is an authenticated API client for the PikPak drive service.
type PikPak struct {
	Account  string `json:"account"`
	Password string `json:"password"`
	JwtToken string `json:"token"`
	// refreshToken is unexported so it is never serialized with the rest
	// of the struct.
	refreshToken string
	CaptchaToken string `json:"captchaToken"`
	Sub          string `json:"userId"`
	// DeviceId is the hex md5 digest of the account name (see
	// NewPikPakWithContext).
	DeviceId string `json:"deviceId"`
	// RefreshSecond is the access-token lifetime in seconds (remaining
	// lifetime once a cached session is loaded).
	RefreshSecond int64 `json:"refreshSecond"`
	client        *http.Client
	ctx           context.Context
}
// NewPikPak builds a client bound to context.Background; see
// NewPikPakWithContext.
func NewPikPak(account, password string) PikPak {
	return NewPikPakWithContext(context.Background(), account, password)
}

// NewPikPakWithContext builds a PikPak client for the given credentials
// whose requests are bound to ctx (nil falls back to Background). When
// the config enables a proxy, both the client's transport and
// http.DefaultClient are rewired to it — the OSS upload helpers issue
// requests through http.DefaultClient directly. The device id is the
// md5 hex digest of the account name.
func NewPikPakWithContext(ctx context.Context, account, password string) PikPak {
	if ctx == nil {
		ctx = context.Background()
	}
	client := &http.Client{
		Transport: &http.Transport{
			Proxy: http.ProxyFromEnvironment,
		},
	}
	if conf.Config.UseProxy() {
		proxyUrl, err := url.Parse(conf.Config.Proxy)
		if err != nil {
			// NOTE(review): on parse failure proxyUrl is nil and the
			// transport below effectively runs without a proxy.
			logx.Warn("api", "url parse proxy error", err)
		}
		p := http.ProxyURL(proxyUrl)
		client.Transport = &http.Transport{
			Proxy: p,
		}
		// Also route the default client (used by the OSS upload path).
		http.DefaultClient.Transport = &http.Transport{
			Proxy: p,
		}
	}
	n := md5.Sum([]byte(account))
	return PikPak{
		Account:  account,
		Password: password,
		DeviceId: hex.EncodeToString(n[:]),
		client:   client,
		ctx:      ctx,
	}
}
// requestContext returns the context bound to this client, falling back
// to context.Background for nil receivers or unset contexts.
func (p *PikPak) requestContext() context.Context {
	if p == nil || p.ctx == nil {
		return context.Background()
	}
	return p.ctx
}
// WithContext returns a shallow copy of p whose requests are bound to
// ctx. A nil receiver yields nil; a nil ctx falls back to Background.
func (p *PikPak) WithContext(ctx context.Context) *PikPak {
	if p == nil {
		return nil
	}
	if ctx == nil {
		ctx = context.Background()
	}
	clone := *p
	clone.ctx = ctx
	return &clone
}
// newRequest builds an HTTP request bound to the client's context.
func (p *PikPak) newRequest(method, url string, body io.Reader) (*http.Request, error) {
	return http.NewRequestWithContext(p.requestContext(), method, url, body)
}
// login performs the full credential-based login flow: it first obtains
// a captcha token for the signin action and then exchanges the account
// credentials for an access/refresh token pair, storing the resulting
// token material on the client.
func (p *PikPak) login() error {
	captchaToken, err := p.getCaptchaToken()
	if err != nil {
		return err
	}
	m := make(map[string]string)
	m["client_id"] = clientID
	m["client_secret"] = clientSecret
	m["grant_type"] = "password"
	m["username"] = p.Account
	m["password"] = p.Password
	m["captcha_token"] = captchaToken
	bs, err := jsoniter.Marshal(&m)
	if err != nil {
		return err
	}
	req, err := p.newRequest("POST", "https://user.mypikpak.com/v1/auth/signin", bytes.NewBuffer(bs))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json; charset=utf-8")
	bs, err = p.sendRequest(req)
	if err != nil {
		return err
	}
	// A non-zero error_code in the JSON body marks an API-level failure.
	error_code := jsoniter.Get(bs, "error_code").ToInt()
	if error_code != 0 {
		return fmt.Errorf("login error: %v", jsoniter.Get(bs, "error").ToString())
	}
	// Persist the token material on the client for later requests.
	p.JwtToken = jsoniter.Get(bs, "access_token").ToString()
	p.refreshToken = jsoniter.Get(bs, "refresh_token").ToString()
	p.Sub = jsoniter.Get(bs, "sub").ToString()
	p.RefreshSecond = jsoniter.Get(bs, "expires_in").ToInt64()
	return nil
}
// getCaptchaToken requests a captcha token scoped to the signin action
// for this device/account pair; the token is required by login.
func (p *PikPak) getCaptchaToken() (string, error) {
	m := make(map[string]any)
	m["client_id"] = clientID
	m["device_id"] = p.DeviceId
	m["action"] = "POST:https://user.mypikpak.com/v1/auth/signin"
	m["meta"] = map[string]string{
		"username": p.Account,
	}
	body, err := jsoniter.Marshal(&m)
	if err != nil {
		return "", err
	}
	req, err := p.newRequest("POST", "https://user.mypikpak.com/v1/shield/captcha/init", bytes.NewBuffer(body))
	if err != nil {
		return "", err
	}
	req.Header.Add("Content-Type", "application/json")
	bs, err := p.sendRequest(req)
	if err != nil {
		return "", err
	}
	// A non-zero error_code marks an API-level failure.
	error_code := jsoniter.Get(bs, "error_code").ToInt()
	if error_code != 0 {
		return "", fmt.Errorf("get captcha error: %v", jsoniter.Get(bs, "error").ToString())
	}
	return jsoniter.Get(bs, "captcha_token").ToString(), nil
}
// sendRequest stamps the shared auth headers onto req, executes it with
// the client bound to this PikPak instance and returns the raw response
// body.
func (p *PikPak) sendRequest(req *http.Request) ([]byte, error) {
	p.setHeader(req)
	resp, err := p.client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	return io.ReadAll(resp.Body)
}
// setHeader stamps every outgoing request with the client identity and,
// once logged in, the bearer token.
func (p *PikPak) setHeader(req *http.Request) {
	req.Header.Set("User-Agent", userAgent)
	req.Header.Set("X-Device-Id", p.DeviceId)
	if token := p.JwtToken; token != "" {
		req.Header.Set("Authorization", "Bearer "+token)
	}
}
// Login establishes an authenticated session, cheapest path first:
//  1. reuse a still-valid cached session from disk,
//  2. otherwise refresh the expired access token,
//  3. otherwise run the full credential login.
// Whichever path succeeds, the resulting session is saved best-effort.
func (p *PikPak) Login() error {
	if err := p.loadSession(); err == nil {
		if !p.isTokenExpired() {
			logx.Debugln("session", "session valid, skip login")
			return nil
		}
		logx.Debugln("session", "access_token expired, trying refresh_token")
		if err = p.RefreshToken(); err == nil {
			p.saveSessionBestEffort()
			return nil
		}
		logx.Debugln("session", "refresh failed, fallback to full login:", err)
	} else {
		logx.Debugln("session", "load session failed, fallback to full login:", err)
	}
	if err := p.login(); err != nil {
		return err
	}
	p.saveSessionBestEffort()
	return nil
}
================================================
FILE: internal/api/quota.go
================================================
package api
import (
"strconv"
jsoniter "github.com/json-iterator/go"
)
// QuotaMessage is the response shape of the drive "about" endpoint.
type QuotaMessage struct {
	Kind      string `json:"kind"`
	Quota     Quota  `json:"quota"`
	ExpiresAt string `json:"expires_at"`
	Quotas    Quotas `json:"quotas"`
}
// Quota describes a single storage quota bucket. All numeric fields are
// decimal strings, as returned by the PikPak API.
type Quota struct {
	Kind           string `json:"kind"`
	Limit          string `json:"limit"`
	Usage          string `json:"usage"`
	UsageInTrash   string `json:"usage_in_trash"`
	PlayTimesLimit string `json:"play_times_limit"`
	PlayTimesUsage string `json:"play_times_usage"`
}

// Remaining returns the unused quota amount (limit minus usage), or an
// error when either field is not a valid decimal integer.
func (q Quota) Remaining() (int64, error) {
	var limit, usage int64
	var err error
	if limit, err = strconv.ParseInt(q.Limit, 10, 64); err != nil {
		return 0, err
	}
	if usage, err = strconv.ParseInt(q.Usage, 10, 64); err != nil {
		return 0, err
	}
	return limit - usage, nil
}
// Quotas groups the per-feature quota buckets.
type Quotas struct {
	CloudDownload Quota `json:"cloud_download"`
}

// TransferMessage is the response shape of the transfer-quota endpoint.
type TransferMessage struct {
	Transfer TransferQuotaCollection `json:"transfer"`
	Base     TransferQuotaBase       `json:"base"`
}

// TransferQuotaCollection holds the current transfer quotas per kind.
type TransferQuotaCollection struct {
	Offline  TransferQuota `json:"offline"`
	Download TransferQuota `json:"download"`
	Upload   TransferQuota `json:"upload"`
}

// TransferQuotaBase mirrors TransferQuotaCollection for the "base"
// quota figures of the response.
type TransferQuotaBase struct {
	Offline  TransferQuota `json:"offline"`
	Download TransferQuota `json:"download"`
	Upload   TransferQuota `json:"upload"`
}
// TransferQuota describes one transfer-quota entry: a total allowance
// and how much of it has been consumed.
type TransferQuota struct {
	Info        string `json:"info"`
	TotalAssets int64  `json:"total_assets"`
	Assets      int64  `json:"assets"`
	Size        int64  `json:"size"`
}

// Remaining reports how many assets are still available.
func (q TransferQuota) Remaining() int64 {
	left := q.TotalAssets
	left -= q.Assets
	return left
}
// GetQuota get cloud quota
func (p *PikPak) GetQuota() (QuotaMessage, error) {
req, err := p.newRequest("GET", "https://api-drive.mypikpak.com/drive/v1/about", nil)
if err != nil {
return QuotaMessage{}, err
}
bs, err := p.sendRequest(req)
if err != nil {
return QuotaMessage{}, err
}
var quotaMessage QuotaMessage
err = jsoniter.Unmarshal(bs, "aMessage)
if err != nil {
return QuotaMessage{}, err
}
return quotaMessage, nil
}
// GetTransferQuota retrieves the monthly transfer quota figures.
func (p *PikPak) GetTransferQuota() (TransferMessage, error) {
	var msg TransferMessage
	req, err := p.newRequest("GET", "https://api-drive.mypikpak.com/vip/v1/quantity/list?type=transfer&limit=200", nil)
	if err != nil {
		return msg, err
	}
	body, err := p.sendRequest(req)
	if err != nil {
		return msg, err
	}
	if err := jsoniter.Unmarshal(body, &msg); err != nil {
		return TransferMessage{}, err
	}
	return msg, nil
}
================================================
FILE: internal/api/quota_test.go
================================================
package api
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestQuotaRemaining checks the happy-path limit-minus-usage arithmetic.
func TestQuotaRemaining(t *testing.T) {
	remaining, err := (Quota{Limit: "10", Usage: "3"}).Remaining()
	require.NoError(t, err)
	assert.Equal(t, int64(7), remaining)
}

// TestQuotaRemainingInvalid checks that a non-numeric field errors.
func TestQuotaRemainingInvalid(t *testing.T) {
	_, err := (Quota{Limit: "bad", Usage: "3"}).Remaining()
	require.Error(t, err)
}

// TestTransferQuotaRemaining checks the asset-count arithmetic.
func TestTransferQuotaRemaining(t *testing.T) {
	remaining := (TransferQuota{TotalAssets: 10, Assets: 3}).Remaining()
	assert.Equal(t, int64(7), remaining)
}
================================================
FILE: internal/api/refresh_token.go
================================================
package api
import (
"bytes"
"fmt"
"github.com/52funny/pikpakcli/internal/logx"
jsoniter "github.com/json-iterator/go"
"github.com/tidwall/gjson"
)
// RefreshToken exchanges the stored refresh token for a new access
// token. When the refresh token itself is rejected (error_code 4126) it
// falls back to the full credential login flow.
func (p *PikPak) RefreshToken() error {
	url := "https://user.mypikpak.com/v1/auth/token"
	m := map[string]string{
		"client_id":     clientID,
		"client_secret": clientSecret,
		"grant_type":    "refresh_token",
		"refresh_token": p.refreshToken,
	}
	bs, err := jsoniter.Marshal(&m)
	if err != nil {
		return err
	}
	req, err := p.newRequest("POST", url, bytes.NewBuffer(bs))
	if err != nil {
		return err
	}
	bs, err = p.sendRequest(req)
	if err != nil {
		return err
	}
	error_code := gjson.GetBytes(bs, "error_code").Int()
	if error_code != 0 {
		// The refresh token is no longer valid: retry with the full
		// username/password login flow.
		if error_code == 4126 {
			return p.login()
		}
		// Fix: "error" is a textual field (sibling code reads it with
		// ToString); formatting it with %d/Int() always printed 0.
		return fmt.Errorf("refresh token error message: %s", gjson.GetBytes(bs, "error").String())
	}
	p.JwtToken = gjson.GetBytes(bs, "access_token").String()
	p.refreshToken = gjson.GetBytes(bs, "refresh_token").String()
	p.RefreshSecond = gjson.GetBytes(bs, "expires_in").Int()
	logx.Debugln("session", "refresh token succeeded")
	return nil
}
================================================
FILE: internal/api/session.go
================================================
package api
import (
"crypto/md5"
"encoding/hex"
"encoding/json"
"fmt"
"os"
"path/filepath"
"time"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/52funny/pikpakcli/internal/utils"
)
// sessionExpirySkew (seconds) is subtracted from the token lifetime so
// a cached session is treated as expired slightly before it really is.
const sessionExpirySkew = 5 * 60

// sessionData is the on-disk representation of cached auth tokens.
type sessionData struct {
	JwtToken     string `json:"access_token"`
	RefreshToken string `json:"refresh_token"`
	Sub          string `json:"sub"`
	// ExpiresAt stores the access token expiration time as a Unix timestamp in seconds.
	ExpiresAt int64 `json:"expires_at"`
}
// saveSession persists the current token state to the local session
// file (mode 0600, since it contains credentials).
func (p *PikPak) saveSession() error {
	path, err := sessionFile(p.Account)
	if err != nil {
		return err
	}
	if err := utils.CreateDirIfNotExist(filepath.Dir(path)); err != nil {
		return fmt.Errorf("create session dir error: %w", err)
	}
	data := sessionData{
		JwtToken:     p.JwtToken,
		RefreshToken: p.refreshToken,
		Sub:          p.Sub,
		// Treat the token as expired slightly early to avoid using a near-expiry session.
		ExpiresAt: time.Now().Unix() + p.RefreshSecond - sessionExpirySkew,
	}
	bs, err := json.Marshal(data)
	if err != nil {
		return fmt.Errorf("marshal session error: %w", err)
	}
	if err = os.WriteFile(path, bs, 0600); err != nil {
		return fmt.Errorf("write session file error: %w", err)
	}
	logx.Debugln("session", "session saved to", path)
	return nil
}
// loadSession restores cached tokens from disk into the current client.
// RefreshSecond is rewritten to the remaining lifetime in seconds, so a
// zero or negative value means the cached access token is stale.
func (p *PikPak) loadSession() error {
	path, err := sessionFile(p.Account)
	if err != nil {
		return err
	}
	bs, err := os.ReadFile(path)
	if err != nil {
		return fmt.Errorf("read session file error: %w", err)
	}
	var data sessionData
	if err = json.Unmarshal(bs, &data); err != nil {
		return fmt.Errorf("unmarshal session error: %w", err)
	}
	p.JwtToken = data.JwtToken
	p.refreshToken = data.RefreshToken
	p.Sub = data.Sub
	// Convert the absolute expiry back into a remaining-seconds figure.
	p.RefreshSecond = data.ExpiresAt - time.Now().Unix()
	logx.Debugln("session", "session loaded from", path)
	return nil
}
// isTokenExpired reports whether the cached access token should be
// treated as expired (loadSession stores the remaining lifetime, in
// seconds, in RefreshSecond).
func (p *PikPak) isTokenExpired() bool {
	return p.RefreshSecond <= 0
}

// saveSessionBestEffort persists the session but only warns on failure:
// a broken cache should never break an otherwise successful login.
func (p *PikPak) saveSessionBestEffort() {
	if err := p.saveSession(); err != nil {
		logx.Warn("session", "save session failed:", err)
	}
}
// sessionFile returns the per-account session cache path inside the
// user config directory. The account is md5-hashed so the filename does
// not expose the account name.
func sessionFile(account string) (string, error) {
	configDir, err := os.UserConfigDir()
	if err != nil {
		return "", fmt.Errorf("get config dir error: %w", err)
	}
	sum := md5.Sum([]byte(account))
	name := "session_" + hex.EncodeToString(sum[:]) + ".json"
	return filepath.Join(configDir, "pikpakcli", name), nil
}
================================================
FILE: internal/api/sha.go
================================================
package api
import (
"bytes"
"fmt"
jsoniter "github.com/json-iterator/go"
)
// CreateShaFile registers a remote file purely from its hash metadata
// ("instant upload"): the server links content it already stores under
// the given sha without any data transfer. size is the decimal byte
// size. It fails when the server does not complete the file
// immediately (phase other than PHASE_TYPE_COMPLETE).
func (p *PikPak) CreateShaFile(parentId, fileName, size, sha string) error {
	m := map[string]interface{}{
		"body": map[string]string{
			"duration": "",
			"width":    "",
			"height":   "",
		},
		"kind":        FileKindFile,
		"name":        fileName,
		"size":        size,
		"hash":        sha,
		"upload_type": "UPLOAD_TYPE_RESUMABLE",
		"objProvider": map[string]string{
			"provider": "UPLOAD_TYPE_UNKNOWN",
		},
	}
	if parentId != "" {
		m["parent_id"] = parentId
	}
	// Fix: keep the marshaled request in its own variable. The old code
	// reused one buffer for request and response, so the captcha retry
	// (goto START) posted the previous error response as its body.
	reqBody, err := jsoniter.Marshal(&m)
	if err != nil {
		return err
	}
	var bs []byte
START:
	req, err := p.newRequest("POST", "https://api-drive.mypikpak.com/drive/v1/files", bytes.NewBuffer(reqBody))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json; charset=utf-8")
	req.Header.Set("Product_flavor_name", "cha")
	req.Header.Set("X-Captcha-Token", p.CaptchaToken)
	req.Header.Set("X-Client-Version-Code", "10083")
	req.Header.Set("X-Peer-Id", p.DeviceId)
	req.Header.Set("X-User-Region", "1")
	req.Header.Set("X-Alt-Capability", "3")
	req.Header.Set("Country", "CN")
	bs, err = p.sendRequest(req)
	if err != nil {
		return err
	}
	error_code := jsoniter.Get(bs, "error_code").ToInt()
	if error_code != 0 {
		if error_code == 9 {
			// Captcha token expired: re-authorize and retry the create.
			if err := p.AuthCaptchaToken("POST:/drive/v1/files"); err != nil {
				return err
			}
			goto START
		}
		// Fix: the message previously said "upload file error" although
		// this path only registers a hash.
		return fmt.Errorf("create sha file error: %s", jsoniter.Get(bs, "error").ToString())
	}
	phase := jsoniter.Get(bs, "file").Get("phase").ToString()
	if phase != "PHASE_TYPE_COMPLETE" {
		return fmt.Errorf("create file error: %s", phase)
	}
	return nil
}
================================================
FILE: internal/api/upload.go
================================================
package api
import (
"bytes"
"context"
"crypto/hmac"
"crypto/sha1"
"encoding/base64"
"encoding/xml"
"errors"
"fmt"
"io"
"math"
"net/http"
"net/url"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"sync"
"time"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/52funny/pikpakhash"
jsoniter "github.com/json-iterator/go"
)
// OssArgs holds the temporary Aliyun OSS credentials and object
// location returned by the PikPak file-create endpoint.
type OssArgs struct {
	Bucket          string `json:"bucket"`
	AccessKeyId     string `json:"access_key_id"`
	AccessKeySecret string `json:"access_key_secret"`
	EndPoint        string `json:"endpoint"`
	Key             string `json:"key"`
	SecurityToken   string `json:"security_token"`
}

// defaultChunkSize is the minimum multipart chunk size: 256 KiB.
var defaultChunkSize int64 = 1 << 18

// Concurrent is the number of parallel upload workers (16).
var Concurrent int64 = 1 << 4

// ErrNotFoundFolder reports that a remote pikpak folder was not found.
var ErrNotFoundFolder = errors.New("not found pikpak folder")
// UploadFile uploads the local file at path into the remote folder
// identified by parentId (empty means the default location).
//
// Flow:
//  1. Register the file by hash. If the server already stores the
//     content (PHASE_TYPE_COMPLETE) nothing is transferred.
//  2. Otherwise the response carries temporary OSS credentials and the
//     file body is uploaded as an OSS multipart object by Concurrent
//     striped workers.
//
// On error_code 9 the captcha token is re-authorized and the create
// request retried via the START label.
func (p *PikPak) UploadFile(parentId, path string) error {
	fileName := filepath.Base(path)
	fileState, err := os.Stat(path)
	if err != nil {
		return err
	}
	fileSize := fileState.Size()
	ph := pikpakhash.Default()
	hash, err := ph.HashFromPath(path)
	if err != nil {
		return err
	}
	m := map[string]interface{}{
		"body": map[string]string{
			"duration": "",
			"width":    "",
			"height":   "",
		},
		"kind":        FileKindFile,
		"name":        fileName,
		"size":        fmt.Sprintf("%d", fileSize),
		"hash":        hash,
		"upload_type": "UPLOAD_TYPE_RESUMABLE",
		"objProvider": map[string]string{
			"provider": "UPLOAD_TYPE_UNKNOWN",
		},
	}
	if parentId != "" {
		m["parent_id"] = parentId
	}
	// Fix: keep the marshaled payload separate from the response bytes.
	// The old code reassigned the same variable to the response, so the
	// captcha retry (goto START) posted the error response as its body.
	reqBody, err := jsoniter.Marshal(&m)
	if err != nil {
		return err
	}
	var bs []byte
START:
	req, err := p.newRequest("POST", "https://api-drive.mypikpak.com/drive/v1/files", bytes.NewBuffer(reqBody))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json; charset=utf-8")
	req.Header.Set("Product_flavor_name", "cha")
	req.Header.Set("X-Captcha-Token", p.CaptchaToken)
	req.Header.Set("X-Client-Version-Code", "10083")
	req.Header.Set("X-Peer-Id", p.DeviceId)
	req.Header.Set("X-User-Region", "1")
	req.Header.Set("X-Alt-Capability", "3")
	req.Header.Set("Country", "CN")
	bs, err = p.sendRequest(req)
	if err != nil {
		return err
	}
	error_code := jsoniter.Get(bs, "error_code").ToInt()
	if error_code != 0 {
		if error_code == 9 {
			// Captcha token expired: re-authorize and retry the create.
			err = p.AuthCaptchaToken("POST:/drive/v1/files")
			if err != nil {
				return err
			}
			goto START
		}
		return fmt.Errorf("upload file error: %s", jsoniter.Get(bs, "error").ToString())
	}
	file := jsoniter.Get(bs, "file")
	phase := file.Get("phase").ToString()
	logx.Debug("upload", "path: ", path, " phase: ", phase)
	switch phase {
	case "PHASE_TYPE_COMPLETE":
		// Instant upload: the server already has this content.
		logx.Debug("upload", path, " upload file complete")
		return nil
	case "PHASE_TYPE_PENDING":
		// Fall through to the real multipart upload below.
	}
	// Temporary OSS credentials for the resumable upload.
	params := jsoniter.Get(bs, "resumable").Get("params")
	ossArgs := OssArgs{
		Bucket:          params.Get("bucket").ToString(),
		AccessKeyId:     params.Get("access_key_id").ToString(),
		AccessKeySecret: params.Get("access_key_secret").ToString(),
		EndPoint:        params.Get("endpoint").ToString(),
		Key:             params.Get("key").ToString(),
		SecurityToken:   params.Get("security_token").ToString(),
	}
	uploadId := p.beforeUpload(ossArgs)
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()
	workers := new(sync.WaitGroup)
	collector := new(sync.WaitGroup)
	ch := make(chan Part, Concurrent)
	// Aim for at most ~10000 parts, but never below defaultChunkSize.
	chunkSize := int64(math.Ceil(float64(fileSize) / 10000))
	if chunkSize < defaultChunkSize {
		chunkSize = defaultChunkSize
	}
	for i := int64(0); i < Concurrent; i++ {
		workers.Add(1)
		go uploadChunk(p.requestContext(), workers, ch, f, chunkSize, fileSize, i, ossArgs, uploadId)
	}
	donePartSlice := make([]Part, 0)
	collector.Add(1)
	go func() {
		defer collector.Done()
		for part := range ch {
			donePartSlice = append(donePartSlice, part)
		}
	}()
	workers.Wait()
	close(ch)
	collector.Wait()
	// The completion request lists parts in ascending part order.
	sort.Slice(donePartSlice, func(i, j int) bool {
		iNum, _ := strconv.Atoi(donePartSlice[i].PartNumber)
		jNum, _ := strconv.Atoi(donePartSlice[j].PartNumber)
		return iNum < jNum
	})
	args := CompleteMultipartUpload{
		Part: donePartSlice,
	}
	return p.afterUpload(&args, ossArgs, uploadId)
}
// uploadChunk is one striped upload worker: worker `part` uploads parts
// part, part+Concurrent, part+2*Concurrent, ... of f as OSS multipart
// parts (part numbers are 1-based) and pushes each completed Part with
// its ETag into ch. The caller owns ch and closes it once all workers
// have returned.
//
// NOTE(review): on a request or transport error the loop retries the
// same offset via `continue` with no backoff or cap, so a persistent
// failure spins forever — worth bounding.
func uploadChunk(ctx context.Context, wait *sync.WaitGroup, ch chan Part, f *os.File, ChunkSize, fileSize int64, part int64, ossArgs OssArgs, uploadId string) {
	defer wait.Done()
	// Nothing to do when this worker's first stripe starts past EOF.
	if part*ChunkSize >= fileSize {
		return
	}
	buf := make([]byte, ChunkSize)
	var offset = part * ChunkSize
	for offset < fileSize {
		// Read errors are deliberately ignored; a zero-length read just
		// skips the PUT for this stripe.
		n, _ := f.ReadAt(buf, offset)
		if n > 0 {
			value := url.Values{}
			value.Add("uploadId", uploadId)
			value.Add("partNumber", fmt.Sprintf("%d", part+1))
			req, err := http.NewRequestWithContext(ctx, "PUT", fmt.Sprintf("https://%s/%s?%s",
				ossArgs.EndPoint,
				ossArgs.Key,
				value.Encode()), bytes.NewBuffer(buf[:n]))
			if err != nil {
				continue
			}
			now := time.Now().UTC()
			req.Header.Set("Content-Type", "application/octet-stream")
			req.Header.Set("X-OSS-Security-Token", ossArgs.SecurityToken)
			req.Header.Set("Date", now.Format(http.TimeFormat))
			req.Header.Set("Authorization", "OSS "+ossArgs.AccessKeyId+":"+hmacAuthorization(req, nil, now, ossArgs))
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				continue
			}
			// The quoted ETag identifies this stored part at completion.
			eTag := strings.Trim(resp.Header.Get("ETag"), "\"")
			p := Part{
				PartNumber: fmt.Sprintf("%d", part+1),
				ETag:       eTag,
			}
			ch <- p
			resp.Body.Close()
		}
		// Advance to this worker's next stripe.
		part = part + Concurrent
		offset = part * ChunkSize
	}
}
// header is a canonicalized (lowercased) request header key/value pair
// used when building the OSS signature.
type header struct {
	key string
	val string
}

// CompleteMultipartUpload is the XML body posted to OSS to finish a
// multipart upload.
type CompleteMultipartUpload struct {
	Part []Part `xml:"Part"`
}

// Part identifies one uploaded part by its 1-based number and the ETag
// OSS returned for it.
type Part struct {
	PartNumber string `xml:"PartNumber"`
	ETag       string `xml:"ETag"`
}
// hmacAuthorization builds the OSS V1 header signature for req: it
// concatenates the method, an (always empty) Content-MD5 line, the
// Content-Type, the supplied date, the sorted canonicalized x-oss-*
// headers and the bucket resource, then signs the string with HMAC-SHA1
// using the temporary AccessKeySecret. The body parameter is kept for
// call-site compatibility but is never hashed (this client does not
// sign bodies).
func hmacAuthorization(req *http.Request, body []byte, t time.Time, ossArgs OssArgs) string {
	date := t.UTC().Format(http.TimeFormat)
	var sb strings.Builder
	sb.WriteString(req.Method + "\n")
	// Content-MD5 line: intentionally empty regardless of body.
	sb.WriteString("\n")
	sb.WriteString(req.Header.Get("Content-Type") + "\n")
	sb.WriteString(date + "\n")
	canonical := make([]header, 0, len(req.Header))
	for k, v := range req.Header {
		name := strings.ToLower(k)
		// Fix: the OSS signing rules include only headers whose name
		// STARTS with x-oss-; the previous Contains check could also
		// pick up unrelated headers that merely embed the substring.
		if strings.HasPrefix(name, "x-oss-") && len(v) > 0 {
			canonical = append(canonical, header{name, v[0]})
		}
	}
	// Canonicalized headers must appear in ascending lexicographic order.
	sort.Slice(canonical, func(i, j int) bool {
		return canonical[i].key < canonical[j].key
	})
	for _, hd := range canonical {
		sb.WriteString(hd.key + ":" + hd.val + "\n")
	}
	sb.WriteString("/" + ossArgs.Bucket + req.URL.Path + "?" + req.URL.RawQuery)
	mac := hmac.New(sha1.New, []byte(ossArgs.AccessKeySecret))
	mac.Write([]byte(sb.String()))
	return base64.StdEncoding.EncodeToString(mac.Sum(nil))
}
// beforeUpload starts an OSS multipart upload session and returns its
// upload id. All failures are reported as an empty id (best-effort, as
// in the rest of the upload path).
func (p *PikPak) beforeUpload(ossArgs OssArgs) string {
	req, err := p.newRequest("POST", "https://"+ossArgs.EndPoint+"/"+ossArgs.Key+"?uploads", nil)
	if err != nil {
		return ""
	}
	now := time.Now().UTC()
	req.Header.Set("Date", now.Format(http.TimeFormat))
	req.Header.Set("Content-Type", "application/octet-stream")
	req.Header.Set("User-Agent", "aliyun-sdk-android/2.9.5(Linux/Android 11/ONEPLUS%20A6000;RKQ1.201217.002)")
	req.Header.Set("X-Oss-Security-Token", ossArgs.SecurityToken)
	req.Header.Set("Authorization",
		fmt.Sprintf("%s %s:%s",
			"OSS",
			ossArgs.AccessKeyId,
			hmacAuthorization(req, nil, now, ossArgs),
		))
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return ""
	}
	defer resp.Body.Close()
	raw, err := io.ReadAll(resp.Body)
	if err != nil {
		return ""
	}
	type InitiateMultipartUploadResult struct {
		Bucket   string `xml:"Bucket"`
		Key      string `xml:"Key"`
		UploadId string `xml:"UploadId"`
	}
	result := new(InitiateMultipartUploadResult)
	if err := xml.Unmarshal(raw, result); err != nil {
		return ""
	}
	return result.UploadId
}
// afterUpload completes the OSS multipart upload by posting the part
// list; the response body is drained before returning.
func (p *PikPak) afterUpload(args *CompleteMultipartUpload, ossArgs OssArgs, uploadId string) error {
	payload, err := xml.Marshal(args)
	if err != nil {
		return err
	}
	req, err := p.newRequest("POST", "https://"+ossArgs.EndPoint+"/"+ossArgs.Key+"?uploadId="+uploadId, bytes.NewBuffer(payload))
	if err != nil {
		return err
	}
	now := time.Now().UTC()
	req.Header.Set("Date", now.Format(http.TimeFormat))
	req.Header.Set("Content-Type", "application/octet-stream")
	req.Header.Set("User-Agent", "aliyun-sdk-android/2.9.5(Linux/Android 11/ONEPLUS%20A6000;RKQ1.201217.002)")
	req.Header.Set("X-Oss-Security-Token", ossArgs.SecurityToken)
	req.Header.Set("Authorization",
		fmt.Sprintf("%s %s:%s",
			"OSS",
			ossArgs.AccessKeyId,
			hmacAuthorization(req, nil, now, ossArgs),
		))
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if _, err := io.ReadAll(resp.Body); err != nil {
		return err
	}
	return nil
}
================================================
FILE: internal/api/url.go
================================================
package api
import (
"bytes"
"fmt"
"github.com/52funny/pikpakcli/internal/logx"
jsoniter "github.com/json-iterator/go"
)
// CreateUrlFile submits url as an offline-download task stored under
// parentId (empty means the default folder). The server processes the
// task asynchronously; the returned task is only logged here.
func (p *PikPak) CreateUrlFile(parentId, url string) error {
	m := map[string]interface{}{
		"kind":        FileKindFile,
		"upload_type": "UPLOAD_TYPE_URL",
		"url": map[string]string{
			"url": url,
		},
	}
	if parentId != "" {
		m["parent_id"] = parentId
	}
	// Fix: keep the marshaled request in its own variable. The old code
	// reused one buffer for request and response, so the captcha retry
	// (goto START) posted the previous error response as its body.
	reqBody, err := jsoniter.Marshal(&m)
	if err != nil {
		return err
	}
	var bs []byte
START:
	req, err := p.newRequest("POST", "https://api-drive.mypikpak.com/drive/v1/files", bytes.NewBuffer(reqBody))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json; charset=utf-8")
	req.Header.Set("Product_flavor_name", "cha")
	req.Header.Set("X-Captcha-Token", p.CaptchaToken)
	req.Header.Set("X-Client-Version-Code", "10083")
	req.Header.Set("X-Peer-Id", p.DeviceId)
	req.Header.Set("X-User-Region", "1")
	req.Header.Set("X-Alt-Capability", "3")
	req.Header.Set("Country", "CN")
	bs, err = p.sendRequest(req)
	if err != nil {
		return err
	}
	error_code := jsoniter.Get(bs, "error_code").ToInt()
	if error_code != 0 {
		if error_code == 9 {
			// Captcha token expired: re-authorize and retry the create.
			if err := p.AuthCaptchaToken("POST:/drive/v1/files"); err != nil {
				return err
			}
			goto START
		}
		// Fix: the message previously said "upload file error" for a URL
		// task creation.
		return fmt.Errorf("create url file error: %s", jsoniter.Get(bs, "error").ToString())
	}
	task := jsoniter.Get(bs, "task")
	logx.Debug("api", task.ToString())
	return nil
}
================================================
FILE: internal/logx/logx.go
================================================
package logx
import (
"fmt"
"log/slog"
"os"
"strings"
)
// enabledTopics is the lowercase set of debug topics allowed to log.
var enabledTopics = map[string]struct{}{}

// debugEnabled gates all Debug/Warn output.
var debugEnabled bool

// logger starts at error-only level until Init reconfigures it.
var logger = slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{
	Level: slog.LevelError,
}))
// Init configures the package logger. Debug output is enabled by the
// debug flag, the PIKPAKCLI_DEBUG environment variable, or any
// requested topic; otherwise only errors are logged. The configured
// logger also becomes slog's process-wide default.
func Init(debug bool, topics []string) {
	enabledTopics = parseTopics(topics)
	debugEnabled = debug || envEnabled("PIKPAKCLI_DEBUG") || len(enabledTopics) > 0
	level := slog.LevelError
	if debugEnabled {
		level = slog.LevelDebug
	}
	logger = slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{
		Level: level,
	}))
	slog.SetDefault(logger)
}
// Enabled reports whether output for topic should be emitted. Nothing
// logs until debug mode is on; after that an empty topic always passes,
// the special "all" topic unlocks everything, and otherwise the topic
// must have been explicitly requested.
func Enabled(topic string) bool {
	switch {
	case !debugEnabled:
		return false
	case topic == "":
		return true
	}
	if _, all := enabledTopics["all"]; all {
		return true
	}
	_, found := enabledTopics[topic]
	return found
}
// Debug logs args (fmt.Sprint style) when logging for topic is enabled.
func Debug(topic string, args ...any) {
	if Enabled(topic) {
		logger.Debug(fmt.Sprint(args...))
	}
}

// Debugln logs args (fmt.Sprintln style) when logging for topic is
// enabled.
func Debugln(topic string, args ...any) {
	if Enabled(topic) {
		logger.Debug(fmt.Sprintln(args...))
	}
}

// Warn logs a warning for topic; like Debug it is gated by Enabled.
func Warn(topic string, args ...any) {
	if Enabled(topic) {
		logger.Warn(fmt.Sprint(args...))
	}
}

// Warnf logs a formatted warning for topic, gated by Enabled.
func Warnf(topic, format string, args ...any) {
	if Enabled(topic) {
		logger.Warn(fmt.Sprintf(format, args...))
	}
}

// Error logs unconditionally at error level.
func Error(args ...any) {
	logger.Error(fmt.Sprint(args...))
}

// Errorf logs a formatted message unconditionally at error level.
func Errorf(format string, args ...any) {
	logger.Error(fmt.Sprintf(format, args...))
}
// parseTopics merges the CLI topic list with the PIKPAKCLI_DEBUG_TOPICS
// environment variable into a lowercase topic set; entries may be
// comma-separated and blanks are dropped. PIKPAKCLI_DEBUG being truthy
// adds the catch-all "all" topic.
func parseTopics(topics []string) map[string]struct{} {
	res := map[string]struct{}{}
	add := func(raw string) {
		for _, item := range strings.Split(raw, ",") {
			if item = strings.TrimSpace(strings.ToLower(item)); item != "" {
				res[item] = struct{}{}
			}
		}
	}
	for _, topic := range topics {
		add(topic)
	}
	add(os.Getenv("PIKPAKCLI_DEBUG_TOPICS"))
	if envEnabled("PIKPAKCLI_DEBUG") {
		res["all"] = struct{}{}
	}
	return res
}
// envEnabled interprets the environment variable key as a boolean-ish
// flag: any of 1/true/yes/on/debug/all (case-insensitive, whitespace
// trimmed) enables it; everything else, including unset, is false.
func envEnabled(key string) bool {
	v := strings.ToLower(strings.TrimSpace(os.Getenv(key)))
	for _, truthy := range []string{"1", "true", "yes", "on", "debug", "all"} {
		if v == truthy {
			return true
		}
	}
	return false
}
================================================
FILE: internal/shell/open.go
================================================
package shell
import (
"errors"
"fmt"
"os"
"os/exec"
"path/filepath"
"runtime"
"strconv"
"strings"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/52funny/pikpakcli/internal/utils"
)
// Categories used to pick how an opened file is handled locally.
const (
	openCategoryDefault = "default"
	openCategoryText    = "text"
	openCategoryImage   = "image"
	openCategoryVideo   = "video"
	openCategoryAudio   = "audio"
	openCategoryPDF     = "pdf"
)

// openFileService is the narrow remote-file surface the shell `open`
// command needs.
type openFileService interface {
	// GetFileByPath resolves a remote path to its stat entry.
	GetFileByPath(path string) (api.FileStat, error)
	// GetFile fetches the full file record by id.
	GetFile(fileID string) (api.File, error)
}
// handleOpenCommand implements the interactive shell's `open` command:
// each argument is resolved against currentPath, fetched, turned into a
// local path or URL and handed to the platform's opener. Folders are
// rejected, and the first failure aborts the remaining arguments.
func handleOpenCommand(p openFileService, currentPath string, args []string) error {
	if len(args) == 0 {
		return errors.New("usage: open [remote-file...]")
	}
	for _, arg := range args {
		targetPath := resolveShellPath(currentPath, arg)
		stat, err := p.GetFileByPath(targetPath)
		if err != nil {
			return fmt.Errorf("open: %s: %w", targetPath, err)
		}
		if stat.Kind == api.FileKindFolder {
			return fmt.Errorf("open: %s: folders are not supported", targetPath)
		}
		file, err := p.GetFile(stat.ID)
		if err != nil {
			return fmt.Errorf("open: %s: get file failed: %w", targetPath, err)
		}
		// Videos may stream from a remote URL; everything else is
		// cached locally first (see resolveOpenTarget).
		openTarget, err := resolveOpenTarget(&file)
		if err != nil {
			return fmt.Errorf("open: %s: resolve open target failed: %w", targetPath, err)
		}
		if err := openWithLocalApp(openTarget, classifyOpenCategory(file.Name)); err != nil {
			return fmt.Errorf("open: %s: launch local app failed: %w", targetPath, err)
		}
		fmt.Printf("Opened %s -> %s\n", targetPath, openTarget)
	}
	return nil
}
// resolveOpenTarget picks what to hand to the local application: a remote
// streaming URL for videos when one is available, otherwise the path of a
// locally cached copy.
func resolveOpenTarget(file *api.File) (string, error) {
	isVideo := classifyOpenCategory(file.Name) == openCategoryVideo
	if isVideo {
		if streamURL := remoteVideoOpenURL(file); streamURL != "" {
			return streamURL, nil
		}
	}
	return cacheOpenFile(file)
}
// cacheOpenFile ensures a local copy of file exists under the open cache
// directory (one subdirectory per file ID) and returns its path. An existing
// copy is reused when its size matches the remote size; otherwise the file
// is (re-)downloaded.
func cacheOpenFile(file *api.File) (string, error) {
	root, err := openCacheRoot()
	if err != nil {
		return "", err
	}
	dir := filepath.Join(root, file.ID)
	if err := utils.CreateDirIfNotExist(dir); err != nil {
		return "", err
	}
	target := filepath.Join(dir, file.Name)
	upToDate, err := localFileMatchesRemoteSize(target, file.Size)
	if err != nil {
		return "", err
	}
	if !upToDate {
		if err := file.Download(target, nil); err != nil {
			return "", err
		}
	}
	return target, nil
}
// openCacheRoot returns the directory used to cache files for the open
// command, creating it when needed. A configured open download dir takes
// precedence; otherwise the user cache dir (falling back to the system temp
// dir) hosts a pikpakcli/open subtree.
func openCacheRoot() (string, error) {
	if configured := strings.TrimSpace(conf.Config.Open.DownloadDir); configured != "" {
		root := utils.ExpandLocalPath(conf.Config.Open.DownloadDir)
		if err := utils.CreateDirIfNotExist(root); err != nil {
			return "", err
		}
		return root, nil
	}
	base, err := os.UserCacheDir()
	if err != nil {
		// No user cache dir on this system: fall back to the temp dir.
		base = filepath.Join(os.TempDir(), "pikpakcli")
	}
	root := filepath.Join(base, "pikpakcli", "open")
	if err := utils.CreateDirIfNotExist(root); err != nil {
		return "", err
	}
	return root, nil
}
func localFileMatchesRemoteSize(path string, remoteSize string) (bool, error) {
expectedSize, err := strconv.ParseInt(remoteSize, 10, 64)
if err != nil || expectedSize < 0 {
return false, nil
}
info, err := os.Stat(path)
if err != nil {
if os.IsNotExist(err) {
return false, nil
}
return false, err
}
return info.Size() == expectedSize, nil
}
// openCategoryByExt maps a lowercase file extension to its open category.
var openCategoryByExt = func() map[string]string {
	table := make(map[string]string)
	register := func(category string, exts ...string) {
		for _, ext := range exts {
			table[ext] = category
		}
	}
	register(openCategoryText,
		".txt", ".md", ".markdown", ".log", ".json", ".yaml", ".yml", ".toml", ".ini", ".cfg", ".conf", ".csv",
		".go", ".rs", ".py", ".js", ".ts", ".tsx", ".jsx", ".java", ".c", ".cc", ".cpp", ".h", ".hpp", ".sh", ".zsh")
	register(openCategoryImage, ".jpg", ".jpeg", ".png", ".gif", ".bmp", ".webp", ".svg", ".heic", ".tiff")
	register(openCategoryVideo, ".mp4", ".mkv", ".mov", ".avi", ".wmv", ".flv", ".webm", ".m4v")
	register(openCategoryAudio, ".mp3", ".flac", ".wav", ".aac", ".m4a", ".ogg", ".opus")
	register(openCategoryPDF, ".pdf")
	return table
}()

// classifyOpenCategory maps a file name to one of the openCategory*
// constants based on its extension (case-insensitive); unknown extensions
// fall back to the default category.
func classifyOpenCategory(name string) string {
	if category, ok := openCategoryByExt[strings.ToLower(filepath.Ext(name))]; ok {
		return category
	}
	return openCategoryDefault
}
// remoteVideoOpenURL returns the best streaming URL for a video file, or ""
// when none is available. Preference order: the default visible media, any
// visible media, any media with a URL, then the raw download links.
func remoteVideoOpenURL(file *api.File) string {
	mediaURL := func(pass int) string {
		for _, media := range file.Medias {
			if strings.TrimSpace(media.Link.URL) == "" {
				continue
			}
			switch pass {
			case 0:
				if media.IsDefault && media.IsVisible {
					return media.Link.URL
				}
			case 1:
				if media.IsVisible {
					return media.Link.URL
				}
			default:
				return media.Link.URL
			}
		}
		return ""
	}
	for pass := 0; pass < 3; pass++ {
		if url := mediaURL(pass); url != "" {
			return url
		}
	}
	if url := file.Links.ApplicationOctetStream.URL; strings.TrimSpace(url) != "" {
		return url
	}
	if url := file.WebContentLink; strings.TrimSpace(url) != "" {
		return url
	}
	return ""
}
// openWithLocalApp launches the platform-appropriate application for the
// given category on target (a local path or URL).
func openWithLocalApp(target string, category string) error {
	name, cmdArgs, err := buildOpenCommand(runtime.GOOS, conf.Config.Open, target, category)
	if err != nil {
		return err
	}
	// Start (not Run): the shell must not block while the viewer is open.
	return exec.Command(name, cmdArgs...).Start()
}
// buildOpenCommand assembles the launcher command line for path. The command
// template comes from configuration (per category, then default) or from the
// platform fallback. Every "{path}" token is substituted with path; when no
// placeholder is present, path is appended as the final argument.
func buildOpenCommand(goos string, cfg conf.OpenConfig, path string, category string) (string, []string, error) {
	template := commandForCategory(cfg, category)
	if len(template) == 0 {
		template = defaultOpenCommand(goos, category)
	}
	if len(template) == 0 {
		return "", nil, fmt.Errorf("unsupported platform: %s", goos)
	}
	argv := make([]string, 0, len(template)+1)
	substituted := false
	for _, token := range template {
		if token == "{path}" {
			argv = append(argv, path)
			substituted = true
		} else {
			argv = append(argv, token)
		}
	}
	if !substituted {
		argv = append(argv, path)
	}
	return argv[0], argv[1:], nil
}
// commandForCategory returns a copy of the command configured for category,
// falling back to the configured default command; nil means nothing is
// configured. Copying keeps callers from mutating the configuration slices.
func commandForCategory(cfg conf.OpenConfig, category string) []string {
	var configured []string
	switch category {
	case openCategoryText:
		configured = cfg.Text
	case openCategoryImage:
		configured = cfg.Image
	case openCategoryVideo:
		configured = cfg.Video
	case openCategoryAudio:
		configured = cfg.Audio
	case openCategoryPDF:
		configured = cfg.PDF
	}
	if len(configured) == 0 {
		configured = cfg.Default
	}
	if len(configured) == 0 {
		return nil
	}
	return append([]string{}, configured...)
}
// defaultOpenCommand returns the built-in launcher for the platform, or nil
// when the platform has no known launcher. macOS picks an application per
// category; Linux and Windows defer to the system opener.
func defaultOpenCommand(goos string, category string) []string {
	if goos == "linux" {
		return []string{"xdg-open"}
	}
	if goos == "windows" {
		// The empty string is the window title `start` expects before the
		// target argument.
		return []string{"cmd", "/c", "start", ""}
	}
	if goos != "darwin" {
		return nil
	}
	switch category {
	case openCategoryText:
		return []string{"open", "-a", "TextEdit"}
	case openCategoryImage, openCategoryPDF:
		return []string{"open", "-a", "Preview"}
	case openCategoryVideo, openCategoryAudio:
		return []string{"open", "-a", "IINA"}
	}
	return []string{"open"}
}
================================================
FILE: internal/shell/shell.go
================================================
package shell
import (
"context"
"fmt"
"io"
"os"
"os/signal"
"path"
"path/filepath"
"slices"
"strings"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/52funny/pikpakcli/internal/logx"
"github.com/52funny/pikpakcli/internal/utils"
"github.com/chzyer/readline"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
)
// builtInCommands are the shell-native commands handled directly by the REPL
// loop (not dispatched to cobra); also offered by completion.
var builtInCommands = []string{"cd", "clear", "exit", "help", "open", "quit"}

// clearScreenSequence is the ANSI escape sequence that homes the cursor and
// clears the terminal.
const clearScreenSequence = "\033[H\033[2J"
// fileStatProvider is the minimal remote-listing interface the shell needs
// for completion and wildcard expansion: resolve a directory path to its
// folder id, and list a folder's entries by id.
type fileStatProvider interface {
	GetPathFolderId(dirPath string) (string, error)
	GetFolderFileStatList(parentId string) ([]api.FileStat, error)
}
// shellAutoCompleter implements readline's AutoComplete for the interactive
// shell: command and flag names come from the cobra tree, path candidates
// from the remote listing. currentPath is a closure so completions always
// track the shell's current working directory.
type shellAutoCompleter struct {
	rootCmd        *cobra.Command
	fileStatSource fileStatProvider
	currentPath    func() string
}
// Start starts the interactive shell (REPL): it logs in with the configured
// credentials, reads lines via readline, handles the built-in commands
// (exit/quit, help, clear, cd, open) directly, and dispatches everything
// else to the cobra command tree with the current remote directory injected
// and wildcards expanded.
func Start(rootCmd *cobra.Command) {
	fmt.Println("PikPak CLI Interactive Shell")
	fmt.Println("Type 'help' for available commands, 'exit' or Ctrl-D to quit")
	fmt.Println()
	// currentPath is the shell's remote working directory; "cd" mutates it.
	currentPath := "/"
	p := api.NewPikPak(conf.Config.Username, conf.Config.Password)
	if err := p.Login(); err != nil {
		fmt.Println("Login failed")
		logx.Error(err)
		return
	}
	l, err := readline.NewEx(&readline.Config{
		Prompt: promptForPath(currentPath),
		AutoComplete: &shellAutoCompleter{
			rootCmd:        rootCmd,
			fileStatSource: &p,
			// Closure so the completer always sees the latest working dir.
			currentPath: func() string {
				return currentPath
			},
		},
	})
	if err != nil {
		fmt.Println("Initialize readline failed")
		logx.Error(err)
		return
	}
	defer l.Close()
	for {
		input, err := l.Readline()
		// Ctrl-C on the prompt: discard the line and re-prompt.
		if isReadlineInterrupt(err) {
			fmt.Println()
			l.SetPrompt(promptForPath(currentPath))
			continue
		}
		// Ctrl-D (EOF): leave the shell.
		if shouldExitOnReadlineError(err) {
			fmt.Println("\nBye~!")
			return
		}
		if err != nil {
			fmt.Println("\nBye~!")
			break
		}
		input = strings.TrimSpace(input)
		if input == "" {
			continue
		}
		args := parseShellArgs(input)
		if len(args) == 0 {
			continue
		}
		// Built-in commands are handled here, before cobra dispatch.
		switch args[0] {
		case "exit", "quit":
			fmt.Println("Bye~!")
			return
		case "help":
			rootCmd.Help()
			continue
		case "clear":
			clearScreen(os.Stdout)
			l.SetPrompt(promptForPath(currentPath))
			continue
		case "cd":
			nextPath, err := changeDirectory(&p, currentPath, args[1:])
			if err != nil {
				fmt.Println("Change directory failed")
				logx.Error(err)
				continue
			}
			currentPath = nextPath
			l.SetPrompt(promptForPath(currentPath))
			continue
		case "open":
			expandedArgs, err := expandOpenGlobs(currentPath, &p, args[1:])
			if err != nil {
				fmt.Println(err.Error())
				continue
			}
			// Ctrl-C while opening cancels the API calls, not the shell.
			cmdCtx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
			if err := handleOpenCommand(p.WithContext(cmdCtx), currentPath, expandedArgs); err != nil {
				fmt.Println(err.Error())
			}
			stop()
			if cmdCtx.Err() != nil {
				fmt.Println()
			}
			continue
		}
		// Rewrite relative paths / inject -p, then expand wildcards, before
		// handing the args to cobra.
		args = adaptShellArgs(rootCmd, currentPath, args)
		args, err = expandShellGlobs(rootCmd, currentPath, &p, args)
		if err != nil {
			fmt.Println(err.Error())
			continue
		}
		// Run the cobra command under an interrupt-cancellable context, then
		// restore clean state (context, args, flag defaults) for the next
		// iteration.
		cmdCtx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
		setCommandContextTree(rootCmd, cmdCtx)
		rootCmd.SetArgs(args)
		rootCmd.Execute()
		stop()
		setCommandContextTree(rootCmd, context.Background())
		rootCmd.SetArgs([]string{})
		resetFlags(rootCmd)
		if cmdCtx.Err() != nil {
			fmt.Println()
		}
	}
}
// shouldExitOnReadlineError reports whether the readline error signals
// end-of-input (Ctrl-D), meaning the shell should terminate.
func shouldExitOnReadlineError(err error) bool {
	return err == io.EOF
}
// isReadlineInterrupt reports whether the readline error came from Ctrl-C,
// which discards the current line instead of exiting the shell.
func isReadlineInterrupt(err error) bool {
	return err == readline.ErrInterrupt
}
// setCommandContextTree installs ctx on cmd and every descendant command so
// a running command (at any depth) observes cancellation.
func setCommandContextTree(cmd *cobra.Command, ctx context.Context) {
	pending := []*cobra.Command{cmd}
	for len(pending) > 0 {
		next := pending[len(pending)-1]
		pending = pending[:len(pending)-1]
		next.SetContext(ctx)
		pending = append(pending, next.Commands()...)
	}
}
// Do implements readline's AutoComplete. It completes, in priority order:
// command names at the start of a line, remote paths for the cd/open
// built-ins, subcommand names for command groups, flag names after "-", and
// remote or local path arguments depending on which command is being typed.
// It returns candidate suffixes plus the rune length of the prefix being
// completed.
func (c *shellAutoCompleter) Do(line []rune, pos int) ([][]rune, int) {
	// Only the text left of the cursor matters for completion.
	input := string(line[:pos])
	tokens, active, endedWithSpace := splitCompletionLine(input)
	if len(tokens) == 0 {
		// First word of the line: complete command names.
		return completeFromPrefix(active, commandCandidates(c.rootCmd), true)
	}
	// Built-ins with path arguments: cd takes directories, open takes files.
	if tokens[0] == "cd" {
		return c.completeRemotePath(active, true)
	}
	if tokens[0] == "open" {
		return c.completeRemotePath(active, false)
	}
	cmd, consumed := resolveCommand(c.rootCmd, tokens)
	if consumed == 0 && !endedWithSpace {
		// First word is still being typed and matches no command yet.
		return completeFromPrefix(active, commandCandidates(c.rootCmd), true)
	}
	if cmd == nil {
		return nil, 0
	}
	if len(cmd.Commands()) > 0 && (endedWithSpace || active != "") && len(tokens) == consumed {
		// Command group (e.g. "new"): complete its subcommand names.
		return completeFromPrefix(active, subcommandCandidates(cmd), true)
	}
	if strings.HasPrefix(active, "-") {
		return completeFromPrefix(active, flagCandidates(cmd), true)
	}
	commandKey := canonicalCommandKey(c.rootCmd, cmd)
	if shouldCompleteLocalPathFlagValue(commandKey, tokens, active, endedWithSpace) {
		return completeLocalPath(active, false)
	}
	if shouldCompleteDirectoryPath(commandKey, tokens, active, endedWithSpace, consumed) {
		return c.completeRemotePath(active, true)
	}
	if shouldCompleteRemoteTargetPath(commandKey, tokens, active, consumed) {
		return c.completeRemotePath(active, false)
	}
	if shouldCompleteLocalTargetPath(commandKey, tokens, active, consumed) {
		return completeLocalPath(active, false)
	}
	return nil, 0
}
// shouldCompleteLocalPathFlagValue reports whether the cursor sits on the
// value of a flag that takes a local filesystem path (currently only the
// --rules flag of the rubbish command).
func shouldCompleteLocalPathFlagValue(commandKey string, tokens []string, active string, endedWithSpace bool) bool {
	if commandKey != "rubbish" {
		return false
	}
	return wantsFlagValue(tokens, active, endedWithSpace, "--rules")
}
// shouldCompleteDirectoryPath reports whether the active token should be
// completed as a remote directory: either the value of -p/--path for a
// path-taking command, or the single optional positional of ls/empty/rubbish.
func shouldCompleteDirectoryPath(commandKey string, tokens []string, active string, endedWithSpace bool, consumed int) bool {
	if commandKey == "" {
		return false
	}
	if wantsFlagValue(tokens, active, endedWithSpace, "-p", "--path") {
		switch commandKey {
		case "ls", "empty", "rubbish", "download", "share", "upload", "delete", "new folder", "new url", "new sha":
			return true
		}
	}
	if commandKey == "ls" || commandKey == "empty" || commandKey == "rubbish" {
		return len(positionalTokens(tokens[consumed:], active)) <= 1
	}
	return false
}
// shouldCompleteRemoteTargetPath reports whether the active token is a
// positional remote path argument: download/share/delete take any number of
// them, rename only the first (the second positional is the new name).
func shouldCompleteRemoteTargetPath(commandKey string, tokens []string, active string, consumed int) bool {
	if commandKey == "" || active == "" {
		return false
	}
	count := len(positionalTokens(tokens[consumed:], active))
	switch commandKey {
	case "download", "share", "delete":
		return count >= 1
	case "rename":
		return count == 1
	}
	return false
}
// shouldCompleteLocalTargetPath reports whether the active token is a
// positional local path; only upload takes local file arguments.
func shouldCompleteLocalTargetPath(commandKey string, tokens []string, active string, consumed int) bool {
	if commandKey != "upload" || active == "" {
		return false
	}
	return len(positionalTokens(tokens[consumed:], active)) >= 1
}
// wantsFlagValue reports whether the user is currently supplying the value
// of one of the given flags: the most recent completed token is such a flag
// and either a separator space follows it or a value is being typed.
func wantsFlagValue(tokens []string, active string, endedWithSpace bool, flags ...string) bool {
	if len(tokens) == 0 {
		return false
	}
	if !endedWithSpace && active == "" {
		// Cursor is still on the flag token itself.
		return false
	}
	return slices.Contains(flags, tokens[len(tokens)-1])
}
func positionalTokens(tokens []string, active string) []string {
positionals := make([]string, 0)
stopFlags := false
for i := 0; i < len(tokens); i++ {
token := tokens[i]
if stopFlags {
positionals = append(positionals, token)
continue
}
switch {
case token == "--":
stopFlags = true
case token == "-p" || token == "--path" ||
token == "-P" || token == "--parent-id" ||
token == "-o" || token == "--output" ||
token == "-i" || token == "--input" ||
token == "-c" || token == "--count" ||
token == "--rules":
if i+1 < len(tokens) {
i++
}
case strings.HasPrefix(token, "-"):
default:
positionals = append(positionals, token)
}
}
if active != "" {
positionals = append(positionals, active)
}
return positionals
}
// completeRemotePath lists the remote directory containing the path prefix
// being typed and returns name completions (directories only when onlyDirs
// is set). Folder candidates gain a trailing "/" and shell-special
// characters are backslash-escaped. The returned int is the rune length of
// the typed prefix that readline should replace.
func (c *shellAutoCompleter) completeRemotePath(prefix string, onlyDirs bool) ([][]rune, int) {
	currentPath := c.currentPath()
	targetPath := resolveShellPath(currentPath, prefix)
	basePrefix := prefix
	if strings.TrimSpace(prefix) == "" {
		// Nothing typed yet: list the current working directory.
		targetPath = currentPath
		basePrefix = ""
	}
	// Split the typed prefix into the directory to list and the partial
	// name to match; a trailing "/" means "list targetPath itself".
	parentPath := targetPath
	namePrefix := ""
	if prefix != "" && !strings.HasSuffix(prefix, "/") {
		parentPath = path.Dir(targetPath)
		if parentPath == "." {
			parentPath = "/"
		}
		namePrefix = path.Base(targetPath)
	}
	// The root folder uses the empty id; anything else must be resolved.
	parentID := ""
	var err error
	if parentPath != "/" {
		parentID, err = c.fileStatSource.GetPathFolderId(parentPath)
		if err != nil {
			// Unresolvable directory: offer no candidates.
			return nil, len([]rune(basePrefix))
		}
	}
	files, err := c.fileStatSource.GetFolderFileStatList(parentID)
	if err != nil {
		return nil, len([]rune(basePrefix))
	}
	candidates := make([]string, 0)
	for _, file := range files {
		if onlyDirs && file.Kind != api.FileKindFolder {
			continue
		}
		if !strings.HasPrefix(file.Name, namePrefix) {
			continue
		}
		// Readline wants only the suffix beyond what is already typed.
		remaining := file.Name[len(namePrefix):]
		if file.Kind == api.FileKindFolder {
			remaining += "/"
		}
		candidates = append(candidates, escapeShellCompletion(remaining))
	}
	return toRuneCandidates(candidates), len([]rune(basePrefix))
}
// completeLocalPath completes a local filesystem path prefix (after local
// expansion, e.g. "~"), mirroring completeRemotePath for upload arguments
// and local flag values. Directory candidates gain a trailing separator and
// shell-special characters are escaped. The returned int is the rune length
// of the typed prefix.
func completeLocalPath(prefix string, onlyDirs bool) ([][]rune, int) {
	expandedPrefix := utils.ExpandLocalPath(prefix)
	parentPath := "."
	basePrefix := prefix
	namePrefix := expandedPrefix
	hasTrailingSeparator := strings.HasSuffix(prefix, string(filepath.Separator))
	if strings.TrimSpace(prefix) == "" {
		// Nothing typed: list the working directory.
		basePrefix = ""
		namePrefix = ""
	} else if !hasTrailingSeparator {
		// Partial name: list its parent directory, match on the base name.
		parentPath = filepath.Dir(expandedPrefix)
		if parentPath == "." && filepath.IsAbs(expandedPrefix) {
			parentPath = string(filepath.Separator)
		}
		namePrefix = filepath.Base(expandedPrefix)
	} else {
		// Trailing separator: list the named directory itself.
		parentPath = expandedPrefix
		namePrefix = ""
	}
	entries, err := os.ReadDir(parentPath)
	if err != nil {
		// Unreadable/nonexistent directory: offer no candidates.
		return nil, len([]rune(basePrefix))
	}
	candidates := make([]string, 0)
	for _, entry := range entries {
		if onlyDirs && !entry.IsDir() {
			continue
		}
		if !strings.HasPrefix(entry.Name(), namePrefix) {
			continue
		}
		// Only the suffix beyond the typed prefix is offered.
		remaining := entry.Name()[len(namePrefix):]
		if entry.IsDir() {
			remaining += string(filepath.Separator)
		}
		candidates = append(candidates, escapeShellCompletion(remaining))
	}
	return toRuneCandidates(candidates), len([]rune(basePrefix))
}
// promptForPath renders the readline prompt for the given remote directory.
func promptForPath(currentPath string) string {
	if currentPath != "/" {
		return fmt.Sprintf("pikpak %s/ > ", currentPath)
	}
	return "pikpak / > "
}
func clearScreen(w io.Writer) {
fmt.Fprint(w, clearScreenSequence)
}
// adaptShellArgs rewrites a typed command line so the cobra commands see
// paths relative to the shell's current remote directory: relative
// positional paths and -p/--path values are resolved, and a default
// "-p <currentPath>" is injected when the command would otherwise fall back
// to the remote root. The input slice is not mutated.
func adaptShellArgs(rootCmd *cobra.Command, currentPath string, args []string) []string {
	if len(args) == 0 {
		return args
	}
	cmd, consumed := resolveCommand(rootCmd, args)
	if consumed == 0 {
		// Unknown command: leave untouched and let cobra report the error.
		return args
	}
	commandKey := canonicalCommandKey(rootCmd, cmd)
	rest := append([]string{}, args[consumed:]...)
	flags := inspectShellArgs(rest)
	switch commandKey {
	case "ls", "empty", "rubbish":
		// Single optional directory argument; default to the current dir.
		rest = rewritePositionalPaths(rest, currentPath, 1)
		if flags.positionals == 0 && !flags.hasPath && !flags.hasParentID {
			rest = append([]string{"-p", currentPath}, rest...)
		}
	case "download":
		rest = rewritePathFlagValues(rest, currentPath)
		// Inject -p only when file arguments are present.
		if flags.positionals > 0 && !flags.hasPath && !flags.hasParentID {
			rest = append([]string{"-p", currentPath}, rest...)
		}
	case "upload":
		rest = rewritePathFlagValues(rest, currentPath)
		if flags.positionals > 0 && !flags.hasPath && !flags.hasParentID {
			rest = append([]string{"-p", currentPath}, rest...)
		}
	case "share", "new folder", "new url", "new sha":
		rest = rewritePathFlagValues(rest, currentPath)
		if !flags.hasPath && !flags.hasParentID {
			rest = append([]string{"-p", currentPath}, rest...)
		}
	case "delete":
		// Delete resolves names against -p itself, so positionals are only
		// rewritten when no explicit -p was given.
		if !flags.hasPath {
			rest = rewritePositionalPaths(rest, currentPath, -1)
		}
	case "rename":
		// Only the first positional is a path; the second is the new name.
		rest = rewritePositionalPaths(rest, currentPath, 1)
	}
	return append(append([]string{}, args[:consumed]...), rest...)
}
// canonicalCommandKey returns cmd's command path relative to the root
// command, e.g. "new folder"; "" for the root itself or a nil command.
func canonicalCommandKey(rootCmd *cobra.Command, cmd *cobra.Command) string {
	if cmd == nil {
		return ""
	}
	full := cmd.CommandPath()
	root := rootCmd.Name()
	if full == root {
		return ""
	}
	return strings.TrimPrefix(full, root+" ")
}
// shellArgFlags summarizes a scanned argument list: whether an explicit
// -p/--path or -P/--parent-id flag is present, and how many positional
// arguments there are.
type shellArgFlags struct {
	hasPath     bool
	hasParentID bool
	positionals int
}
// inspectShellArgs scans command arguments and reports whether an explicit
// -p/--path or -P/--parent-id was given (separate or "=" form) and how many
// positional arguments there are. Value-taking flags have their values
// skipped; everything after a literal "--" is positional.
func inspectShellArgs(args []string) shellArgFlags {
	var result shellArgFlags
	afterDoubleDash := false
	for i := 0; i < len(args); i++ {
		token := args[i]
		switch {
		case afterDoubleDash:
			result.positionals++
		case token == "--":
			afterDoubleDash = true
		case token == "--path" || token == "-p":
			result.hasPath = true
			if i+1 < len(args) {
				i++
			}
		case strings.HasPrefix(token, "--path=") || strings.HasPrefix(token, "-p="):
			result.hasPath = true
		case token == "--parent-id" || token == "-P":
			result.hasParentID = true
			if i+1 < len(args) {
				i++
			}
		case strings.HasPrefix(token, "--parent-id=") || strings.HasPrefix(token, "-P="):
			result.hasParentID = true
		case token == "--rules":
			// --rules consumes a value but is neither path nor parent id.
			if i+1 < len(args) {
				i++
			}
		case strings.HasPrefix(token, "-"):
			// Other flags take no value here.
		default:
			result.positionals++
		}
	}
	return result
}
// rewritePathFlagValues returns a copy of args in which every value of
// -p/--path (separate or "=" form) is resolved against currentPath.
func rewritePathFlagValues(args []string, currentPath string) []string {
	out := append([]string{}, args...)
	for i := 0; i < len(out); i++ {
		token := out[i]
		if token == "--path" || token == "-p" {
			if i+1 < len(out) {
				out[i+1] = resolveShellPath(currentPath, out[i+1])
				i++
			}
			continue
		}
		if value, found := strings.CutPrefix(token, "--path="); found {
			out[i] = "--path=" + resolveShellPath(currentPath, value)
			continue
		}
		if value, found := strings.CutPrefix(token, "-p="); found {
			out[i] = "-p=" + resolveShellPath(currentPath, value)
		}
	}
	return out
}
// rewritePositionalPaths returns a copy of args in which up to limit
// positional arguments (all of them when limit is negative) are resolved
// against currentPath. Value-taking flags and their values are untouched,
// and after a literal "--" every token counts as positional.
func rewritePositionalPaths(args []string, currentPath string, limit int) []string {
	out := append([]string{}, args...)
	afterDoubleDash := false
	rewrites := 0
	mayRewrite := func() bool { return limit < 0 || rewrites < limit }
	for i := 0; i < len(out); i++ {
		token := out[i]
		if afterDoubleDash {
			if mayRewrite() {
				out[i] = resolveShellPath(currentPath, token)
				rewrites++
			}
			continue
		}
		switch {
		case token == "--":
			afterDoubleDash = true
		case consumesNextValue(token):
			if i+1 < len(out) {
				i++ // leave the flag's value alone
			}
		case strings.HasPrefix(token, "-"):
			// Non-value flag: skip.
		default:
			if mayRewrite() {
				out[i] = resolveShellPath(currentPath, token)
				rewrites++
			}
		}
	}
	return out
}
// changeDirectory validates the cd target against the remote tree and
// returns the new absolute working directory; with no argument it returns
// the root.
func changeDirectory(p *api.PikPak, currentPath string, args []string) (string, error) {
	destination := "/"
	if len(args) != 0 {
		destination = args[0]
	}
	resolved := resolveShellPath(currentPath, destination)
	if resolved == "/" {
		return resolved, nil
	}
	if _, err := p.GetPathFolderId(resolved); err != nil {
		return "", fmt.Errorf("cd: %s: no such directory", resolved)
	}
	return resolved, nil
}
// resolveShellPath converts target into a cleaned absolute remote path.
// "", "~" and "/" (after trimming) all mean the root; absolute targets are
// cleaned as-is and relative targets are joined onto currentPath.
func resolveShellPath(currentPath string, target string) string {
	trimmed := strings.TrimSpace(target)
	if trimmed == "" || trimmed == "~" || trimmed == "/" {
		return "/"
	}
	if strings.HasPrefix(target, "/") {
		return path.Clean(target)
	}
	return path.Clean(path.Join(currentPath, target))
}
// expandOpenGlobs expands wildcard patterns in open arguments against the
// remote tree rooted at currentPath, returning absolute match paths.
func expandOpenGlobs(currentPath string, source fileStatProvider, args []string) ([]string, error) {
	out := make([]string, 0, len(args))
	for _, token := range args {
		hits, err := expandRemotePatternToken(token, "", currentPath, source, false)
		if err != nil {
			return nil, err
		}
		out = append(out, hits...)
	}
	return out, nil
}
// expandShellGlobs performs wildcard expansion for the commands that take
// file arguments: download and delete expand against the remote tree,
// upload against the local filesystem. All other commands pass through
// untouched.
func expandShellGlobs(rootCmd *cobra.Command, currentPath string, source fileStatProvider, args []string) ([]string, error) {
	if len(args) == 0 {
		return args, nil
	}
	cmd, consumed := resolveCommand(rootCmd, args)
	if consumed == 0 {
		return args, nil
	}
	rest := append([]string{}, args[consumed:]...)
	var expanded []string
	var err error
	switch canonicalCommandKey(rootCmd, cmd) {
	case "download":
		expanded, err = expandDownloadGlobs(rest, currentPath, source)
	case "delete":
		expanded, err = expandDeleteGlobs(rest, currentPath, source)
	case "upload":
		expanded, err = expandUploadGlobs(rest)
	default:
		return args, nil
	}
	if err != nil {
		return nil, err
	}
	return append(append([]string{}, args[:consumed]...), expanded...), nil
}
// expandDownloadGlobs expands remote wildcard positionals for the download
// command; see rewriteDownloadLikeArgs for the flag-aware scan.
func expandDownloadGlobs(args []string, currentPath string, source fileStatProvider) ([]string, error) {
	return rewriteDownloadLikeArgs(args, currentPath, source)
}
// expandDeleteGlobs expands wildcard positionals for the delete command.
// The value of -p/--path (separate or "=" form) is tracked so patterns can
// be resolved relative to it; flags and their values pass through unchanged,
// and everything after a literal "--" is treated as a pattern.
func expandDeleteGlobs(args []string, currentPath string, source fileStatProvider) ([]string, error) {
	rewritten := make([]string, 0, len(args))
	stopFlags := false
	// pathValue remembers the most recent -p/--path value seen so far.
	pathValue := ""
	for i := 0; i < len(args); i++ {
		token := args[i]
		if stopFlags {
			matches, err := expandDeletePatternToken(token, pathValue, currentPath, source)
			if err != nil {
				return nil, err
			}
			rewritten = append(rewritten, matches...)
			continue
		}
		switch {
		case token == "--":
			stopFlags = true
			rewritten = append(rewritten, token)
		case token == "--path" || token == "-p":
			rewritten = append(rewritten, token)
			if i+1 < len(args) {
				pathValue = args[i+1]
				rewritten = append(rewritten, pathValue)
				i++
			}
		case strings.HasPrefix(token, "--path="):
			pathValue = strings.TrimPrefix(token, "--path=")
			rewritten = append(rewritten, token)
		case strings.HasPrefix(token, "-p="):
			pathValue = strings.TrimPrefix(token, "-p=")
			rewritten = append(rewritten, token)
		default:
			// Other value-taking flags keep their value verbatim.
			if consumesNextValue(token) {
				rewritten = append(rewritten, token)
				if i+1 < len(args) {
					rewritten = append(rewritten, args[i+1])
					i++
				}
				continue
			}
			if strings.HasPrefix(token, "-") {
				rewritten = append(rewritten, token)
				continue
			}
			// Positional argument: expand any wildcard it contains.
			matches, err := expandDeletePatternToken(token, pathValue, currentPath, source)
			if err != nil {
				return nil, err
			}
			rewritten = append(rewritten, matches...)
		}
	}
	return rewritten, nil
}
// expandUploadGlobs expands wildcard positionals for the upload command
// against the local filesystem. Upload's value-taking flags keep their
// values verbatim, any other "-" token passes through, and everything after
// a literal "--" is treated as a pattern.
func expandUploadGlobs(args []string) ([]string, error) {
	rewritten := make([]string, 0, len(args))
	stopFlags := false
	for i := 0; i < len(args); i++ {
		token := args[i]
		if stopFlags {
			matches, err := expandLocalPatternToken(token)
			if err != nil {
				return nil, err
			}
			rewritten = append(rewritten, matches...)
			continue
		}
		switch {
		case token == "--":
			stopFlags = true
			rewritten = append(rewritten, token)
		// Upload flags that consume the following argument as their value.
		case token == "--path" || token == "-p" ||
			token == "--parent-id" || token == "-P" ||
			token == "--concurrency" || token == "-c" ||
			token == "--exn" || token == "-e":
			rewritten = append(rewritten, token)
			if i+1 < len(args) {
				rewritten = append(rewritten, args[i+1])
				i++
			}
		// "=" forms and any other flag-looking token pass straight through.
		case strings.HasPrefix(token, "--path=") ||
			strings.HasPrefix(token, "-p=") ||
			strings.HasPrefix(token, "--parent-id=") ||
			strings.HasPrefix(token, "-P=") ||
			strings.HasPrefix(token, "--concurrency=") ||
			strings.HasPrefix(token, "-c=") ||
			strings.HasPrefix(token, "--exn=") ||
			strings.HasPrefix(token, "-e=") ||
			strings.HasPrefix(token, "-"):
			rewritten = append(rewritten, token)
		default:
			// Local positional: expand wildcards against the filesystem.
			matches, err := expandLocalPatternToken(token)
			if err != nil {
				return nil, err
			}
			rewritten = append(rewritten, matches...)
		}
	}
	return rewritten, nil
}
// rewriteDownloadLikeArgs expands remote wildcard positionals for
// download-style commands. It tracks the -p/--path value so relative
// patterns resolve against it (and matches are rewritten relative to that
// base), and rejects wildcards combined with --parent-id but no --path,
// since the pattern's directory cannot be resolved by path in that case.
func rewriteDownloadLikeArgs(args []string, currentPath string, source fileStatProvider) ([]string, error) {
	rewritten := make([]string, 0, len(args))
	stopFlags := false
	pathValue := ""
	hasParentID := false
	for i := 0; i < len(args); i++ {
		token := args[i]
		if stopFlags {
			matches, err := expandRemotePatternToken(token, pathValue, currentPath, source, true)
			if err != nil {
				return nil, err
			}
			rewritten = append(rewritten, matches...)
			continue
		}
		switch {
		case token == "--":
			stopFlags = true
			rewritten = append(rewritten, token)
		case token == "--path" || token == "-p":
			rewritten = append(rewritten, token)
			if i+1 < len(args) {
				pathValue = args[i+1]
				rewritten = append(rewritten, pathValue)
				i++
			}
		case strings.HasPrefix(token, "--path="):
			pathValue = strings.TrimPrefix(token, "--path=")
			rewritten = append(rewritten, token)
		case strings.HasPrefix(token, "-p="):
			pathValue = strings.TrimPrefix(token, "-p=")
			rewritten = append(rewritten, token)
		case token == "--parent-id" || token == "-P":
			hasParentID = true
			rewritten = append(rewritten, token)
			if i+1 < len(args) {
				rewritten = append(rewritten, args[i+1])
				i++
			}
		case strings.HasPrefix(token, "--parent-id=") || strings.HasPrefix(token, "-P="):
			hasParentID = true
			rewritten = append(rewritten, token)
		default:
			// Other value-taking flags keep their value verbatim.
			if consumesNextValue(token) {
				rewritten = append(rewritten, token)
				if i+1 < len(args) {
					rewritten = append(rewritten, args[i+1])
					i++
				}
				continue
			}
			if strings.HasPrefix(token, "-") {
				rewritten = append(rewritten, token)
				continue
			}
			// Wildcards cannot be resolved when only a folder id is known.
			if hasParentID && pathValue == "" && hasWildcard(token) {
				return nil, fmt.Errorf("shell: wildcard expansion with --parent-id requires --path")
			}
			matches, err := expandRemotePatternToken(token, pathValue, currentPath, source, true)
			if err != nil {
				return nil, err
			}
			rewritten = append(rewritten, matches...)
		}
	}
	return rewritten, nil
}
// expandDeletePatternToken expands a single delete argument that may contain
// wildcards. When an explicit -p base path is set, nested relative patterns
// are rejected because delete resolves names relative to that base only.
func expandDeletePatternToken(token string, pathValue string, currentPath string, source fileStatProvider) ([]string, error) {
	if !hasWildcard(token) {
		return []string{token}, nil
	}
	nestedRelative := !path.IsAbs(token) && strings.Contains(token, "/")
	if pathValue != "" && nestedRelative {
		return nil, fmt.Errorf("shell: wildcard expansion with -p does not support nested remote paths: %s", token)
	}
	return expandRemotePatternToken(token, pathValue, currentPath, source, pathValue != "")
}
// expandRemotePatternToken expands a single remote wildcard token. Relative
// patterns resolve against pathValue (when non-blank) or currentPath. The
// matched absolute paths are returned, except that when preferRelative is
// set and the token was relative to an explicit pathValue, matches are
// rewritten relative to that base. Zero matches is an error; non-wildcard
// tokens pass through unchanged.
func expandRemotePatternToken(token string, pathValue string, currentPath string, source fileStatProvider, preferRelative bool) ([]string, error) {
	if !hasWildcard(token) {
		return []string{token}, nil
	}
	basePath := currentPath
	if strings.TrimSpace(pathValue) != "" {
		basePath = pathValue
	}
	patternPath := token
	if !path.IsAbs(patternPath) {
		patternPath = path.Clean(path.Join(basePath, patternPath))
	} else {
		patternPath = path.Clean(patternPath)
	}
	// Match the pattern's final element against its parent's listing.
	parentPath := path.Dir(patternPath)
	if parentPath == "." {
		parentPath = "/"
	}
	matches, err := matchRemotePattern(source, parentPath, path.Base(patternPath))
	if err != nil {
		return nil, err
	}
	if len(matches) == 0 {
		return nil, fmt.Errorf("shell: no matches found for %s", token)
	}
	if preferRelative && !path.IsAbs(token) && strings.TrimSpace(pathValue) != "" {
		rewritten := make([]string, 0, len(matches))
		for _, match := range matches {
			rewritten = append(rewritten, relativeRemotePath(pathValue, match))
		}
		return rewritten, nil
	}
	return matches, nil
}
// matchRemotePattern lists parentPath on the remote and returns the full
// paths of entries whose names match pattern (path.Match syntax). The root
// directory is listed with the empty folder id.
func matchRemotePattern(source fileStatProvider, parentPath string, pattern string) ([]string, error) {
	folderID := ""
	if parentPath != "/" {
		id, err := source.GetPathFolderId(parentPath)
		if err != nil {
			return nil, err
		}
		folderID = id
	}
	entries, err := source.GetFolderFileStatList(folderID)
	if err != nil {
		return nil, err
	}
	matches := make([]string, 0)
	for _, entry := range entries {
		ok, err := path.Match(pattern, entry.Name)
		if err != nil {
			return nil, fmt.Errorf("shell: invalid wildcard pattern %s: %w", pattern, err)
		}
		if ok {
			matches = append(matches, path.Join(parentPath, entry.Name))
		}
	}
	return matches, nil
}
// expandLocalPatternToken expands a local filesystem wildcard (after local
// path expansion). Non-wildcard tokens pass through unchanged, and zero
// matches is an error so typos don't silently become literal arguments.
func expandLocalPatternToken(token string) ([]string, error) {
	if !hasWildcard(token) {
		return []string{token}, nil
	}
	hits, err := filepath.Glob(utils.ExpandLocalPath(token))
	if err != nil {
		return nil, fmt.Errorf("shell: invalid wildcard pattern %s: %w", token, err)
	}
	if len(hits) == 0 {
		return nil, fmt.Errorf("shell: no matches found for %s", token)
	}
	return hits, nil
}
// consumesNextValue reports whether the flag token takes its value as the
// following argument (space-separated form).
func consumesNextValue(token string) bool {
	valueFlags := [...]string{
		"--path", "-p",
		"--parent-id", "-P",
		"--output", "-o",
		"--input", "-i",
		"--count", "-c",
		"--rules",
	}
	for _, flag := range valueFlags {
		if token == flag {
			return true
		}
	}
	return false
}
// hasWildcard reports whether value contains a path.Match metacharacter.
func hasWildcard(value string) bool {
	for _, meta := range "*?[" {
		if strings.ContainsRune(value, meta) {
			return true
		}
	}
	return false
}
// relativeRemotePath rewrites fullPath relative to basePath when it lies
// underneath it; otherwise the cleaned fullPath is returned unchanged.
func relativeRemotePath(basePath string, fullPath string) string {
	base := path.Clean(basePath)
	full := path.Clean(fullPath)
	switch {
	case base == "/":
		return strings.TrimPrefix(full, "/")
	case strings.HasPrefix(full, base+"/"):
		return full[len(base)+1:]
	default:
		return full
	}
}
// splitCompletionLine splits the text left of the cursor into completed
// tokens, the token still being typed ("active"), and whether the input
// ended with an unquoted separator space. Quoting rules match parseShellArgs
// (double/single quotes, backslash escapes, backslash literal inside single
// quotes).
func splitCompletionLine(input string) ([]string, string, bool) {
	args := make([]string, 0)
	var current strings.Builder
	inDoubleQuote := false
	inSingleQuote := false
	endedWithSpace := false
	escaped := false
	for i := 0; i < len(input); i++ {
		ch := input[i]
		if escaped {
			// Character after a backslash is taken literally.
			current.WriteByte(ch)
			escaped = false
			endedWithSpace = false
			continue
		}
		switch ch {
		case '\\':
			if inSingleQuote {
				// Backslash is literal inside single quotes.
				current.WriteByte(ch)
			} else {
				escaped = true
			}
			endedWithSpace = false
		case '"':
			endedWithSpace = false
			if inSingleQuote {
				current.WriteByte(ch)
			} else {
				inDoubleQuote = !inDoubleQuote
			}
		case '\'':
			endedWithSpace = false
			if inDoubleQuote {
				current.WriteByte(ch)
			} else {
				inSingleQuote = !inSingleQuote
			}
		case ' ', '\t':
			if inDoubleQuote || inSingleQuote {
				current.WriteByte(ch)
				endedWithSpace = false
			} else {
				// Unquoted separator: close the current token.
				if current.Len() > 0 {
					args = append(args, current.String())
					current.Reset()
				}
				endedWithSpace = true
			}
		default:
			current.WriteByte(ch)
			endedWithSpace = false
		}
	}
	if current.Len() > 0 {
		// Cursor is mid-token: that token is the active one.
		return args, current.String(), false
	}
	return args, "", endedWithSpace
}
// commandCandidates returns the sorted, de-duplicated set of names offered
// at the start of a line: built-in shell commands plus the root command's
// subcommands and aliases.
func commandCandidates(rootCmd *cobra.Command) []string {
	names := make([]string, 0, len(builtInCommands))
	names = append(names, builtInCommands...)
	names = append(names, subcommandCandidates(rootCmd)...)
	slices.Sort(names)
	return slices.Compact(names)
}
// subcommandCandidates returns the sorted, de-duplicated names and aliases
// of cmd's visible (non-hidden) subcommands.
func subcommandCandidates(cmd *cobra.Command) []string {
	names := make([]string, 0)
	for _, sub := range cmd.Commands() {
		if !sub.Hidden {
			names = append(names, sub.Name())
			names = append(names, sub.Aliases...)
		}
	}
	slices.Sort(names)
	return slices.Compact(names)
}
// flagCandidates returns the sorted, de-duplicated --long and -short flag
// spellings registered on cmd (both local and persistent flag sets).
func flagCandidates(cmd *cobra.Command) []string {
	names := make([]string, 0)
	collect := func(f *pflag.Flag) {
		names = append(names, "--"+f.Name)
		if f.Shorthand != "" {
			names = append(names, "-"+f.Shorthand)
		}
	}
	cmd.Flags().VisitAll(collect)
	cmd.PersistentFlags().VisitAll(collect)
	slices.Sort(names)
	return slices.Compact(names)
}
// resolveCommand walks tokens down the cobra command tree (matching the
// names and aliases of visible commands) and returns the deepest command
// reached together with how many tokens it consumed; (rootCmd, 0) when the
// first token matches nothing.
func resolveCommand(rootCmd *cobra.Command, tokens []string) (*cobra.Command, int) {
	current := rootCmd
	consumed := 0
	for _, token := range tokens {
		next := findVisibleSubcommand(current, token)
		if next == nil {
			break
		}
		current = next
		consumed++
	}
	return current, consumed
}

// findVisibleSubcommand returns the non-hidden child of cmd whose name or
// alias equals token, or nil when there is none.
func findVisibleSubcommand(cmd *cobra.Command, token string) *cobra.Command {
	for _, sub := range cmd.Commands() {
		if sub.Hidden {
			continue
		}
		if sub.Name() == token || slices.Contains(sub.Aliases, token) {
			return sub
		}
	}
	return nil
}
// completeFromPrefix returns the completion suffixes of the candidates that
// start with prefix (optionally followed by a space), plus the rune length
// of the prefix readline should replace.
func completeFromPrefix(prefix string, candidates []string, appendSpace bool) ([][]rune, int) {
	suffixes := make([]string, 0)
	for _, candidate := range candidates {
		if strings.HasPrefix(candidate, prefix) {
			tail := strings.TrimPrefix(candidate, prefix)
			if appendSpace {
				tail += " "
			}
			suffixes = append(suffixes, tail)
		}
	}
	return toRuneCandidates(suffixes), len([]rune(prefix))
}
// toRuneCandidates converts completion strings into the rune-slice form
// readline's AutoComplete interface expects.
func toRuneCandidates(candidates []string) [][]rune {
	out := make([][]rune, len(candidates))
	for i, candidate := range candidates {
		out[i] = []rune(candidate)
	}
	return out
}
// parseShellArgs parses shell-like arguments
func parseShellArgs(input string) []string {
var args []string
var current strings.Builder
inDoubleQuote := false
inSingleQuote := false
escaped := false
for i := 0; i < len(input); i++ {
ch := input[i]
if escaped {
current.WriteByte(ch)
escaped = false
continue
}
switch ch {
case '\\':
if inSingleQuote {
current.WriteByte(ch)
} else {
escaped = true
}
case '"':
if inSingleQuote {
current.WriteByte(ch)
} else {
inDoubleQuote = !inDoubleQuote
}
case '\'':
if inDoubleQuote {
current.WriteByte(ch)
} else {
inSingleQuote = !inSingleQuote
}
case ' ', '\t':
if inDoubleQuote || inSingleQuote {
current.WriteByte(ch)
} else if current.Len() > 0 {
args = append(args, current.String())
current.Reset()
}
default:
current.WriteByte(ch)
}
}
if current.Len() > 0 {
args = append(args, current.String())
}
return args
}
// escapeShellCompletion backslash-escapes the characters that
// parseShellArgs treats specially (space, backslash, double and single
// quote) so a completed path survives re-tokenization.
func escapeShellCompletion(value string) string {
	var out strings.Builder
	out.Grow(len(value))
	for i := 0; i < len(value); i++ {
		c := value[i]
		if c == ' ' || c == '\\' || c == '"' || c == '\'' {
			out.WriteByte('\\')
		}
		out.WriteByte(c)
	}
	return out.String()
}
// resetFlags recursively restores every flag in the command tree to its
// default value so flag state from one shell invocation does not leak into
// the next.
func resetFlags(cmd *cobra.Command) {
	reset := func(f *pflag.Flag) {
		// Best-effort: DefValue was accepted when the flag was
		// registered, so Set should not fail; reset stays non-fatal.
		_ = f.Value.Set(f.DefValue)
		// Also clear Changed — otherwise commands that branch on
		// flag.Changed would see the flag as explicitly set on every
		// subsequent shell command.
		f.Changed = false
	}
	cmd.Flags().VisitAll(reset)
	cmd.PersistentFlags().VisitAll(reset)
	for _, subCmd := range cmd.Commands() {
		resetFlags(subCmd)
	}
}
================================================
FILE: internal/shell/shell_test.go
================================================
package shell
import (
"context"
"errors"
"io"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/52funny/pikpakcli/conf"
"github.com/52funny/pikpakcli/internal/api"
"github.com/chzyer/readline"
"github.com/spf13/cobra"
"github.com/stretchr/testify/require"
)
// TestParseShellArgs checks shell-style tokenization: plain whitespace
// splitting, double and single quoting, and backslash-escaped spaces.
func TestParseShellArgs(t *testing.T) {
	tests := []struct {
		name  string
		input string
		want  []string
	}{
		{
			name:  "plain args",
			input: "ls -l -p /Movies",
			want:  []string{"ls", "-l", "-p", "/Movies"},
		},
		{
			name:  "double quoted path",
			input: `cd "/Movies/Kids Cartoons"`,
			want:  []string{"cd", "/Movies/Kids Cartoons"},
		},
		{
			name:  "single quoted path",
			input: "cd '/Movies/Kids Cartoons'",
			want:  []string{"cd", "/Movies/Kids Cartoons"},
		},
		{
			name:  "escaped spaces",
			input: `cd /My\ Pack/Kids\ Cartoons`,
			want:  []string{"cd", "/My Pack/Kids Cartoons"},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			require.Equal(t, tt.want, parseShellArgs(tt.input))
		})
	}
}
// TestResolveShellPath verifies remote-path resolution against the current
// shell directory: "~" and "" map to root, relative segments (including
// "..") resolve against currentPath, absolute targets win outright, and
// repeated separators are cleaned.
func TestResolveShellPath(t *testing.T) {
	tests := []struct {
		name        string
		currentPath string
		target      string
		want        string
	}{
		{
			name:        "root home shortcut",
			currentPath: "/Movies",
			target:      "~",
			want:        "/",
		},
		{
			name:        "relative child",
			currentPath: "/Movies",
			target:      "Kids",
			want:        "/Movies/Kids",
		},
		{
			name:        "relative parent",
			currentPath: "/Movies/Kids",
			target:      "..",
			want:        "/Movies",
		},
		{
			name:        "absolute path",
			currentPath: "/Movies",
			target:      "/TV Shows/Drama",
			want:        "/TV Shows/Drama",
		},
		{
			name:        "clean repeated separators",
			currentPath: "/Movies",
			target:      "Kids//Cartoons",
			want:        "/Movies/Kids/Cartoons",
		},
		{
			name:        "empty target goes root",
			currentPath: "/Movies/Kids",
			target:      "",
			want:        "/",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			require.Equal(t, tt.want, resolveShellPath(tt.currentPath, tt.target))
		})
	}
}
// TestSplitCompletionLine exercises the completion-line splitter: it must
// separate completed tokens from the token under the cursor, report whether
// the line ends in a separator (spaced), and unquote/unescape the active
// token.
func TestSplitCompletionLine(t *testing.T) {
	tests := []struct {
		name   string
		input  string
		tokens []string
		active string
		spaced bool
	}{
		{
			name:   "partial command",
			input:  "sh",
			tokens: []string{},
			active: "sh",
		},
		{
			name:   "command with trailing space",
			input:  "cd ",
			tokens: []string{"cd"},
			active: "",
			spaced: true,
		},
		{
			// Unterminated quote: the quoted remainder is still the active token.
			name:   "quoted path",
			input:  `cd "/Movies/Kids Cartoons`,
			tokens: []string{"cd"},
			active: "/Movies/Kids Cartoons",
		},
		{
			name:   "escaped spaces",
			input:  `cd /My\ Pack/Kids\ Cart`,
			tokens: []string{"cd"},
			active: "/My Pack/Kids Cart",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tokens, active, spaced := splitCompletionLine(tt.input)
			require.Equal(t, tt.tokens, tokens)
			require.Equal(t, tt.active, active)
			require.Equal(t, tt.spaced, spaced)
		})
	}
}
// TestShouldExitOnReadlineError confirms only io.EOF ends the read loop;
// nil, interrupts, and other errors keep the shell alive.
func TestShouldExitOnReadlineError(t *testing.T) {
	cases := []struct {
		err  error
		exit bool
	}{
		{io.EOF, true},
		{nil, false},
		{readline.ErrInterrupt, false},
		{errors.New("other error"), false},
	}
	for _, c := range cases {
		require.Equal(t, c.exit, shouldExitOnReadlineError(c.err))
	}
}
// TestIsReadlineInterrupt checks that only readline.ErrInterrupt is
// classified as an interrupt.
func TestIsReadlineInterrupt(t *testing.T) {
	cases := []struct {
		err  error
		want bool
	}{
		{readline.ErrInterrupt, true},
		{nil, false},
		{io.EOF, false},
	}
	for _, c := range cases {
		require.Equal(t, c.want, isReadlineInterrupt(c.err))
	}
}
// TestSetCommandContextTree verifies a context is propagated to every
// command in the tree and can later be replaced by a fresh one.
func TestSetCommandContextTree(t *testing.T) {
	rootCmd := &cobra.Command{Use: "root"}
	childCmd := &cobra.Command{Use: "child"}
	rootCmd.AddCommand(childCmd)

	cancelable, cancel := context.WithCancel(context.Background())
	setCommandContextTree(rootCmd, cancelable)
	cancel()
	for _, cmd := range []*cobra.Command{rootCmd, childCmd} {
		require.ErrorIs(t, cmd.Context().Err(), context.Canceled)
	}

	setCommandContextTree(rootCmd, context.Background())
	for _, cmd := range []*cobra.Command{rootCmd, childCmd} {
		require.NoError(t, cmd.Context().Err())
	}
}
// TestCompleterCommandsAndFlags drives the shell auto-completer end to end
// against a stub command tree and a fake remote listing: command-name
// completion, flag completion, and remote-path/file completion for each
// path-aware command.
func TestCompleterCommandsAndFlags(t *testing.T) {
	rootCmd := &cobra.Command{Use: "pikpakcli"}
	listCmd := &cobra.Command{Use: "ls"}
	listCmd.Flags().StringP("path", "p", "/", "")
	rootCmd.AddCommand(listCmd)
	emptyCmd := &cobra.Command{Use: "empty"}
	emptyCmd.Flags().StringP("path", "p", "/", "")
	rootCmd.AddCommand(emptyCmd)
	downloadCmd := &cobra.Command{Use: "download"}
	downloadCmd.Flags().StringP("path", "p", "/", "")
	rootCmd.AddCommand(downloadCmd)
	shareCmd := &cobra.Command{Use: "share"}
	shareCmd.Flags().StringP("path", "p", "/", "")
	rootCmd.AddCommand(shareCmd)
	rubbishCmd := &cobra.Command{Use: "rubbish"}
	rubbishCmd.Flags().StringP("path", "p", "/", "")
	rubbishCmd.Flags().String("rules", "", "")
	rootCmd.AddCommand(rubbishCmd)
	deleteCmd := &cobra.Command{Use: "delete"}
	deleteCmd.Flags().StringP("path", "p", "/", "")
	rootCmd.AddCommand(deleteCmd)
	renameCmd := &cobra.Command{Use: "rename"}
	rootCmd.AddCommand(renameCmd)
	rootCmd.AddCommand(&cobra.Command{Use: "shell"})
	// Root listing: two folders and one file, served for parent id "".
	completer := &shellAutoCompleter{
		rootCmd: rootCmd,
		fileStatSource: fakeFileStatProvider{
			folders: map[string][]api.FileStat{
				"": {
					{Name: "Movies", Kind: api.FileKindFolder},
					{Name: "Music", Kind: api.FileKindFolder},
					{Name: "movie.mp4", Kind: api.FileKindFile},
				},
			},
		},
		currentPath: func() string {
			return "/"
		},
	}
	// Command-name completion: "sh" -> "shell " suffix; built-in shell
	// commands must also be offered.
	candidates, offset := completer.Do([]rune("sh"), 2)
	require.Equal(t, 2, offset)
	require.Contains(t, candidates, []rune("ell "))
	require.Contains(t, commandCandidates(rootCmd), "clear")
	require.Contains(t, commandCandidates(rootCmd), "open")
	// Flag completion after a bare dash.
	candidates, offset = completer.Do([]rune("ls -"), 4)
	require.Equal(t, 1, offset)
	require.Contains(t, candidates, []rune("p "))
	// Remote folder completion (folders get a trailing slash)...
	candidates, offset = completer.Do([]rune("ls /Mov"), len("ls /Mov"))
	require.Equal(t, len([]rune("/Mov")), offset)
	require.Contains(t, candidates, []rune("ies/"))
	candidates, offset = completer.Do([]rune("empty -p /Mov"), len("empty -p /Mov"))
	require.Equal(t, len([]rune("/Mov")), offset)
	require.Contains(t, candidates, []rune("ies/"))
	candidates, offset = completer.Do([]rune("download -p /Mov"), len("download -p /Mov"))
	require.Equal(t, len([]rune("/Mov")), offset)
	require.Contains(t, candidates, []rune("ies/"))
	// ...and remote file completion for file-taking commands.
	candidates, offset = completer.Do([]rune("download mov"), len("download mov"))
	require.Equal(t, len([]rune("mov")), offset)
	require.Contains(t, candidates, []rune("ie.mp4"))
	candidates, offset = completer.Do([]rune("share mov"), len("share mov"))
	require.Equal(t, len([]rune("mov")), offset)
	require.Contains(t, candidates, []rune("ie.mp4"))
	candidates, offset = completer.Do([]rune("delete mov"), len("delete mov"))
	require.Equal(t, len([]rune("mov")), offset)
	require.Contains(t, candidates, []rune("ie.mp4"))
	candidates, offset = completer.Do([]rune("rename mov"), len("rename mov"))
	require.Equal(t, len([]rune("mov")), offset)
	require.Contains(t, candidates, []rune("ie.mp4"))
	candidates, offset = completer.Do([]rune("open mov"), len("open mov"))
	require.Equal(t, len([]rune("mov")), offset)
	require.Contains(t, candidates, []rune("ie.mp4"))
}
// TestCompleterUploadLocalPath checks that upload arguments complete against
// the LOCAL filesystem (cwd), not the remote tree: files complete plainly,
// directories gain a trailing slash.
func TestCompleterUploadLocalPath(t *testing.T) {
	tempDir := t.TempDir()
	require.NoError(t, os.WriteFile(filepath.Join(tempDir, "local.txt"), []byte("x"), 0644))
	require.NoError(t, os.Mkdir(filepath.Join(tempDir, "folder"), 0755))
	// Run from the temp dir so relative completion scans it.
	oldWD, err := os.Getwd()
	require.NoError(t, err)
	require.NoError(t, os.Chdir(tempDir))
	t.Cleanup(func() {
		_ = os.Chdir(oldWD)
	})
	rootCmd := &cobra.Command{Use: "pikpakcli"}
	uploadCmd := &cobra.Command{Use: "upload"}
	uploadCmd.Flags().StringP("path", "p", "/", "")
	rootCmd.AddCommand(uploadCmd)
	completer := &shellAutoCompleter{
		rootCmd:        rootCmd,
		fileStatSource: fakeFileStatProvider{},
		currentPath: func() string {
			return "/"
		},
	}
	candidates, offset := completer.Do([]rune("upload loc"), len("upload loc"))
	require.Equal(t, len([]rune("loc")), offset)
	require.Contains(t, candidates, []rune("al.txt"))
	candidates, offset = completer.Do([]rune("upload fol"), len("upload fol"))
	require.Equal(t, len([]rune("fol")), offset)
	require.Contains(t, candidates, []rune("der/"))
}
// TestCompleterUploadHomePath checks that "~/"-prefixed upload arguments are
// completed against the user's real home directory, independent of the
// current working directory (the test deliberately chdirs to home's parent).
func TestCompleterUploadHomePath(t *testing.T) {
	home, err := os.UserHomeDir()
	require.NoError(t, err)
	tempHomeRoot := filepath.Dir(home)
	// homeName is only sanity-checked below; the completions themselves use "~/".
	homeName := filepath.Base(home)
	testDirName := "codex-upload-home-test"
	testDir := filepath.Join(home, testDirName)
	require.NoError(t, os.MkdirAll(testDir, 0755))
	t.Cleanup(func() {
		_ = os.RemoveAll(testDir)
	})
	rootCmd := &cobra.Command{Use: "pikpakcli"}
	uploadCmd := &cobra.Command{Use: "upload"}
	uploadCmd.Flags().StringP("path", "p", "/", "")
	rootCmd.AddCommand(uploadCmd)
	completer := &shellAutoCompleter{
		rootCmd:        rootCmd,
		fileStatSource: fakeFileStatProvider{},
		currentPath: func() string {
			return "/"
		},
	}
	oldWD, err := os.Getwd()
	require.NoError(t, err)
	require.NoError(t, os.Chdir(tempHomeRoot))
	t.Cleanup(func() {
		_ = os.Chdir(oldWD)
	})
	// Partial name under "~/" completes with the remaining suffix plus "/".
	candidates, offset := completer.Do([]rune("upload ~/"+testDirName[:5]), len("upload ~/"+testDirName[:5]))
	require.Equal(t, len([]rune("~/"+testDirName[:5])), offset)
	require.Contains(t, candidates, []rune(testDirName[5:]+"/"))
	require.NotEmpty(t, homeName)
	// Bare "~/" lists the home directory contents.
	candidates, offset = completer.Do([]rune("upload ~/"), len("upload ~/"))
	require.Equal(t, len([]rune("~/")), offset)
	require.Contains(t, candidates, []rune(testDirName+"/"))
}
// TestCompleterRubbishRulesLocalThenRemotePath checks that within one
// rubbish invocation the --rules value completes against the LOCAL
// filesystem while the positional path completes against the REMOTE tree
// (with spaces backslash-escaped).
func TestCompleterRubbishRulesLocalThenRemotePath(t *testing.T) {
	tempDir := t.TempDir()
	require.NoError(t, os.WriteFile(filepath.Join(tempDir, "rubbish_rules.txt"), []byte("*.tmp\n"), 0644))
	oldWD, err := os.Getwd()
	require.NoError(t, err)
	require.NoError(t, os.Chdir(tempDir))
	t.Cleanup(func() {
		_ = os.Chdir(oldWD)
	})
	rootCmd := &cobra.Command{Use: "pikpakcli"}
	rubbishCmd := &cobra.Command{Use: "rubbish"}
	rubbishCmd.Flags().StringP("path", "p", "/", "")
	rubbishCmd.Flags().String("rules", "", "")
	rootCmd.AddCommand(rubbishCmd)
	completer := &shellAutoCompleter{
		rootCmd: rootCmd,
		fileStatSource: fakeFileStatProvider{
			folders: map[string][]api.FileStat{
				"": {
					{Name: "My Pack", Kind: api.FileKindFolder},
					{Name: "Movies", Kind: api.FileKindFolder},
				},
			},
		},
		currentPath: func() string {
			return "/"
		},
	}
	// --rules value: local file completion.
	candidates, offset := completer.Do([]rune("rubbish --rules rub"), len("rubbish --rules rub"))
	require.Equal(t, len([]rune("rub")), offset)
	require.Contains(t, candidates, []rune("bish_rules.txt"))
	// Positional arg after --rules: remote completion with escaped space.
	candidates, offset = completer.Do([]rune("rubbish --rules rubbish_rules.txt /My"), len("rubbish --rules rubbish_rules.txt /My"))
	require.Equal(t, len([]rune("/My")), offset)
	require.Contains(t, candidates, []rune("\\ Pack/"))
}
// TestClearScreen ensures clearScreen writes exactly the ANSI clear-screen
// escape sequence to the given writer.
func TestClearScreen(t *testing.T) {
	var buf strings.Builder
	clearScreen(&buf)
	require.Equal(t, clearScreenSequence, buf.String())
}
// TestAdaptShellArgs checks how shell arguments are rewritten before being
// handed to cobra: injecting the current remote directory as -p, rewriting
// relative positional paths to absolute ones, and leaving local-only values
// (like --rules files) untouched.
func TestAdaptShellArgs(t *testing.T) {
	rootCmd := &cobra.Command{Use: "pikpakcli"}
	listCmd := &cobra.Command{Use: "ls"}
	listCmd.Flags().StringP("path", "p", "/", "")
	rootCmd.AddCommand(listCmd)
	emptyCmd := &cobra.Command{Use: "empty"}
	emptyCmd.Flags().StringP("path", "p", "/", "")
	rootCmd.AddCommand(emptyCmd)
	downloadCmd := &cobra.Command{Use: "download"}
	downloadCmd.Flags().StringP("path", "p", "/", "")
	downloadCmd.Flags().StringP("parent-id", "P", "", "")
	rootCmd.AddCommand(downloadCmd)
	shareCmd := &cobra.Command{Use: "share"}
	shareCmd.Flags().StringP("path", "p", "/", "")
	shareCmd.Flags().StringP("parent-id", "P", "", "")
	rootCmd.AddCommand(shareCmd)
	uploadCmd := &cobra.Command{Use: "upload"}
	uploadCmd.Flags().StringP("path", "p", "/", "")
	uploadCmd.Flags().StringP("parent-id", "P", "", "")
	rootCmd.AddCommand(uploadCmd)
	deleteCmd := &cobra.Command{Use: "delete"}
	deleteCmd.Aliases = []string{"del", "rm"}
	deleteCmd.Flags().StringP("path", "p", "/", "")
	rootCmd.AddCommand(deleteCmd)
	renameCmd := &cobra.Command{Use: "rename"}
	rootCmd.AddCommand(renameCmd)
	newCmd := &cobra.Command{Use: "new"}
	newCmd.Aliases = []string{"n"}
	newFolderCmd := &cobra.Command{Use: "folder"}
	newFolderCmd.Flags().StringP("path", "p", "/", "")
	newFolderCmd.Flags().StringP("parent-id", "P", "", "")
	newCmd.AddCommand(newFolderCmd)
	newURLCmd := &cobra.Command{Use: "url"}
	newURLCmd.Flags().StringP("path", "p", "/", "")
	newURLCmd.Flags().StringP("parent-id", "P", "", "")
	newCmd.AddCommand(newURLCmd)
	newSHACmd := &cobra.Command{Use: "sha"}
	newSHACmd.Flags().StringP("path", "p", "/", "")
	newSHACmd.Flags().StringP("parent-id", "P", "", "")
	newCmd.AddCommand(newSHACmd)
	rootCmd.AddCommand(newCmd)
	tests := []struct {
		name string
		args []string
		want []string
	}{
		{name: "ls injects current path", args: []string{"ls"}, want: []string{"ls", "-p", "/Movies"}},
		{name: "ls rewrites relative arg", args: []string{"ls", "Kids"}, want: []string{"ls", "/Movies/Kids"}},
		{name: "empty rewrites relative arg", args: []string{"empty", "Kids"}, want: []string{"empty", "/Movies/Kids"}},
		{name: "rubbish keeps local rules path and rewrites remote root", args: []string{"rubbish", "--rules", "~/Library/Application Support/pikpakcli/rules/rubbish_rules.txt", "/"}, want: []string{"rubbish", "--rules", "~/Library/Application Support/pikpakcli/rules/rubbish_rules.txt", "/"}},
		{name: "download without args keeps command unchanged", args: []string{"download"}, want: []string{"download"}},
		{name: "download injects current path", args: []string{"download", "episode.mkv"}, want: []string{"download", "-p", "/Movies", "episode.mkv"}},
		{name: "download rewrites relative path flag", args: []string{"download", "-p", "Kids", "episode.mkv"}, want: []string{"download", "-p", "/Movies/Kids", "episode.mkv"}},
		{name: "download keeps trailing dot as positional target", args: []string{"download", "-g", "episode.mkv", "."}, want: []string{"download", "-p", "/Movies", "-g", "episode.mkv", "."}},
		{name: "share injects current path", args: []string{"share", "episode.mkv"}, want: []string{"share", "-p", "/Movies", "episode.mkv"}},
		{name: "upload without args keeps command unchanged", args: []string{"upload"}, want: []string{"upload"}},
		{name: "upload injects current path", args: []string{"upload", "local.file"}, want: []string{"upload", "-p", "/Movies", "local.file"}},
		{name: "delete rewrites relative args", args: []string{"delete", "a", "b/c"}, want: []string{"delete", "/Movies/a", "/Movies/b/c"}},
		{name: "rm alias rewrites relative args", args: []string{"rm", "a", "b/c"}, want: []string{"rm", "/Movies/a", "/Movies/b/c"}},
		{name: "rename rewrites first arg only", args: []string{"rename", "old.txt", "new.txt"}, want: []string{"rename", "/Movies/old.txt", "new.txt"}},
		{name: "new folder injects current path", args: []string{"new", "folder", "a/b"}, want: []string{"new", "folder", "-p", "/Movies", "a/b"}},
		{name: "new alias folder injects current path", args: []string{"n", "folder", "a/b"}, want: []string{"n", "folder", "-p", "/Movies", "a/b"}},
		{name: "new url injects current path", args: []string{"new", "url", "https://example.com"}, want: []string{"new", "url", "-p", "/Movies", "https://example.com"}},
		{name: "new sha injects current path", args: []string{"new", "sha", "PikPak://a|1|sha"}, want: []string{"new", "sha", "-p", "/Movies", "PikPak://a|1|sha"}},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			require.Equal(t, tt.want, adaptShellArgs(rootCmd, "/Movies", tt.args))
		})
	}
}
// TestExpandShellGlobsDownload verifies that a remote glob in a download
// command expands to the matching file names in the resolved -p directory.
func TestExpandShellGlobsDownload(t *testing.T) {
	rootCmd := &cobra.Command{Use: "pikpakcli"}
	downloadCmd := &cobra.Command{Use: "download"}
	downloadCmd.Flags().StringP("path", "p", "/", "")
	downloadCmd.Flags().StringP("parent-id", "P", "", "")
	rootCmd.AddCommand(downloadCmd)
	args := adaptShellArgs(rootCmd, "/Movies", []string{"download", "*.mp4"})
	expanded, err := expandShellGlobs(rootCmd, "/Movies", fakeFileStatProvider{
		folders: map[string][]api.FileStat{
			"movies-id": {
				{Name: "movie-1.mp4", Kind: api.FileKindFile},
				{Name: "movie-2.mp4", Kind: api.FileKindFile},
				{Name: "note.txt", Kind: api.FileKindFile},
			},
		},
		ids: map[string]string{
			"/Movies": "movies-id",
		},
	}, args)
	require.NoError(t, err)
	require.Equal(t, []string{"download", "-p", "/Movies", "movie-1.mp4", "movie-2.mp4"}, expanded)
}
// TestExpandShellGlobsDelete verifies that delete globs expand to absolute
// remote paths (delete takes positional paths, not a -p directory).
func TestExpandShellGlobsDelete(t *testing.T) {
	rootCmd := &cobra.Command{Use: "pikpakcli"}
	deleteCmd := &cobra.Command{Use: "delete"}
	deleteCmd.Flags().StringP("path", "p", "/", "")
	rootCmd.AddCommand(deleteCmd)
	args := adaptShellArgs(rootCmd, "/Movies", []string{"delete", "*.srt"})
	expanded, err := expandShellGlobs(rootCmd, "/Movies", fakeFileStatProvider{
		folders: map[string][]api.FileStat{
			"movies-id": {
				{Name: "episode-1.srt", Kind: api.FileKindFile},
				{Name: "episode-2.srt", Kind: api.FileKindFile},
				{Name: "episode-1.mkv", Kind: api.FileKindFile},
			},
		},
		ids: map[string]string{
			"/Movies": "movies-id",
		},
	}, args)
	require.NoError(t, err)
	require.Equal(t, []string{"delete", "/Movies/episode-1.srt", "/Movies/episode-2.srt"}, expanded)
}
// TestExpandShellGlobsUpload verifies that upload globs expand against the
// LOCAL filesystem (temp dir fixtures), keeping only matching files.
func TestExpandShellGlobsUpload(t *testing.T) {
	rootCmd := &cobra.Command{Use: "pikpakcli"}
	uploadCmd := &cobra.Command{Use: "upload"}
	uploadCmd.Flags().StringP("path", "p", "/", "")
	uploadCmd.Flags().StringP("parent-id", "P", "", "")
	uploadCmd.Flags().Int64P("concurrency", "c", 16, "")
	uploadCmd.Flags().StringSliceP("exn", "e", nil, "")
	uploadCmd.Flags().BoolP("sync", "s", false, "")
	rootCmd.AddCommand(uploadCmd)
	tempDir := t.TempDir()
	videoA := filepath.Join(tempDir, "a.mkv")
	videoB := filepath.Join(tempDir, "b.mkv")
	note := filepath.Join(tempDir, "note.txt")
	require.NoError(t, os.WriteFile(videoA, []byte("a"), 0o644))
	require.NoError(t, os.WriteFile(videoB, []byte("b"), 0o644))
	require.NoError(t, os.WriteFile(note, []byte("c"), 0o644))
	args := adaptShellArgs(rootCmd, "/Movies", []string{"upload", filepath.Join(tempDir, "*.mkv")})
	expanded, err := expandShellGlobs(rootCmd, "/Movies", fakeFileStatProvider{}, args)
	require.NoError(t, err)
	// note.txt does not match *.mkv and must be excluded.
	require.Equal(t, []string{"upload", "-p", "/Movies", videoA, videoB}, expanded)
}
// TestExpandOpenGlobs verifies that the shell-builtin open command expands a
// remote glob to absolute remote paths of the matching files.
func TestExpandOpenGlobs(t *testing.T) {
	expanded, err := expandOpenGlobs("/Movies", fakeFileStatProvider{
		folders: map[string][]api.FileStat{
			"movies-id": {
				{Name: "movie-1.mp4", Kind: api.FileKindFile},
				{Name: "movie-2.mp4", Kind: api.FileKindFile},
				{Name: "cover.jpg", Kind: api.FileKindFile},
			},
		},
		ids: map[string]string{
			"/Movies": "movies-id",
		},
	}, []string{"*.mp4"})
	require.NoError(t, err)
	require.Equal(t, []string{"/Movies/movie-1.mp4", "/Movies/movie-2.mp4"}, expanded)
}
// TestCompleterCDPath verifies cd completion of an absolute remote folder
// prefix (folders complete with a trailing slash).
func TestCompleterCDPath(t *testing.T) {
	completer := &shellAutoCompleter{
		rootCmd: &cobra.Command{Use: "pikpakcli"},
		fileStatSource: fakeFileStatProvider{
			folders: map[string][]api.FileStat{
				"": {
					{Name: "Movies", Kind: api.FileKindFolder},
					{Name: "Music", Kind: api.FileKindFolder},
				},
				"movies-id": {
					{Name: "Kids Cartoons", Kind: api.FileKindFolder},
				},
			},
			ids: map[string]string{
				"/Movies": "movies-id",
			},
		},
		currentPath: func() string {
			return "/"
		},
	}
	candidates, offset := completer.Do([]rune("cd /Mov"), len("cd /Mov"))
	require.Equal(t, len([]rune("/Mov")), offset)
	require.Contains(t, candidates, []rune("ies/"))
}
// TestCompleterCDPathFromCurrentDirectory verifies that a relative cd prefix
// is resolved against the shell's current remote directory, and that spaces
// in completions are backslash-escaped.
func TestCompleterCDPathFromCurrentDirectory(t *testing.T) {
	completer := &shellAutoCompleter{
		rootCmd: &cobra.Command{Use: "pikpakcli"},
		fileStatSource: fakeFileStatProvider{
			folders: map[string][]api.FileStat{
				"movies-id": {
					{Name: "Kids Cartoons", Kind: api.FileKindFolder},
					{Name: "Drama", Kind: api.FileKindFolder},
				},
			},
			ids: map[string]string{
				"/Movies": "movies-id",
			},
		},
		currentPath: func() string {
			return "/Movies"
		},
	}
	candidates, offset := completer.Do([]rune("cd Ki"), len("cd Ki"))
	require.Equal(t, len([]rune("Ki")), offset)
	require.Contains(t, candidates, []rune(`ds\ Cartoons/`))
}
// TestCompleterEscapesSpacesInPath verifies that spaces inside a completed
// remote folder name are backslash-escaped in the suggestion.
func TestCompleterEscapesSpacesInPath(t *testing.T) {
	completer := &shellAutoCompleter{
		rootCmd: &cobra.Command{Use: "pikpakcli"},
		fileStatSource: fakeFileStatProvider{
			folders: map[string][]api.FileStat{
				"": {
					{Name: "My Pack", Kind: api.FileKindFolder},
				},
			},
		},
		currentPath: func() string {
			return "/"
		},
	}
	candidates, offset := completer.Do([]rune("cd /My"), len("cd /My"))
	require.Equal(t, len([]rune("/My")), offset)
	require.Contains(t, candidates, []rune(`\ Pack/`))
}
// fakeFileStatProvider is an in-memory stand-in for the PikPak API used by
// the shell tests: ids maps remote directory paths to folder ids, and
// folders maps folder ids to their canned file listings.
type fakeFileStatProvider struct {
	folders map[string][]api.FileStat
	ids     map[string]string
}

// GetPathFolderId resolves a remote directory path to its fake folder id.
// Unknown paths resolve to the empty id without error.
func (f fakeFileStatProvider) GetPathFolderId(dirPath string) (string, error) {
	id := f.ids[dirPath]
	return id, nil
}

// GetFolderFileStatList returns the canned listing for a folder id (nil for
// unknown ids).
func (f fakeFileStatProvider) GetFolderFileStatList(parentId string) ([]api.FileStat, error) {
	return f.folders[parentId], nil
}
// TestClassifyOpenCategory checks extension-based classification for each
// supported open category plus the fallback for unknown extensions.
func TestClassifyOpenCategory(t *testing.T) {
	cases := map[string]any{
		"readme.md":   openCategoryText,
		"cover.png":   openCategoryImage,
		"movie.mkv":   openCategoryVideo,
		"song.flac":   openCategoryAudio,
		"paper.pdf":   openCategoryPDF,
		"archive.zip": openCategoryDefault,
	}
	for file, want := range cases {
		require.Equal(t, want, classifyOpenCategory(file))
	}
}
// TestBuildOpenCommand covers command construction for opening files:
// built-in macOS defaults per category, a configured override command, and
// {path} placeholder substitution in a custom default command.
func TestBuildOpenCommand(t *testing.T) {
	// Darwin built-ins: TextEdit for text...
	name, args, err := buildOpenCommand("darwin", conf.OpenConfig{}, "/tmp/demo.txt", openCategoryText)
	require.NoError(t, err)
	require.Equal(t, "open", name)
	require.Equal(t, []string{"-a", "TextEdit", "/tmp/demo.txt"}, args)
	// ...and IINA for video.
	name, args, err = buildOpenCommand("darwin", conf.OpenConfig{}, "/tmp/demo.mp4", openCategoryVideo)
	require.NoError(t, err)
	require.Equal(t, "open", name)
	require.Equal(t, []string{"-a", "IINA", "/tmp/demo.mp4"}, args)
	// Config override: first element is the binary, rest are leading args.
	name, args, err = buildOpenCommand("linux", conf.OpenConfig{
		Video: []string{"vlc", "--fullscreen"},
	}, "/tmp/demo.mp4", openCategoryVideo)
	require.NoError(t, err)
	require.Equal(t, "vlc", name)
	require.Equal(t, []string{"--fullscreen", "/tmp/demo.mp4"}, args)
	// A {path} placeholder is replaced instead of appending the path.
	name, args, err = buildOpenCommand("linux", conf.OpenConfig{
		Default: []string{"custom-open", "--file", "{path}"},
	}, "/tmp/demo.bin", openCategoryDefault)
	require.NoError(t, err)
	require.Equal(t, "custom-open", name)
	require.Equal(t, []string{"--file", "/tmp/demo.bin"}, args)
}
// TestRemoteVideoOpenURL verifies that when multiple visible media streams
// exist, remoteVideoOpenURL picks the one marked IsDefault rather than the
// first visible entry. (The anonymous struct literals mirror api.File's
// Medias field exactly.)
func TestRemoteVideoOpenURL(t *testing.T) {
	file := &api.File{}
	file.Medias = []struct {
		MediaID   string      `json:"media_id"`
		MediaName string      `json:"media_name"`
		Video     interface{} `json:"video"`
		Link      struct {
			URL    string    `json:"url"`
			Token  string    `json:"token"`
			Expire time.Time `json:"expire"`
		} `json:"link"`
		NeedMoreQuota  bool          `json:"need_more_quota"`
		VipTypes       []interface{} `json:"vip_types"`
		RedirectLink   string        `json:"redirect_link"`
		IconLink       string        `json:"icon_link"`
		IsDefault      bool          `json:"is_default"`
		Priority       int           `json:"priority"`
		IsOrigin       bool          `json:"is_origin"`
		ResolutionName string        `json:"resolution_name"`
		IsVisible      bool          `json:"is_visible"`
		Category       string        `json:"category"`
	}{
		{
			Link: struct {
				URL    string    `json:"url"`
				Token  string    `json:"token"`
				Expire time.Time `json:"expire"`
			}{URL: "https://example.com/visible.m3u8"},
			IsVisible: true,
		},
		{
			Link: struct {
				URL    string    `json:"url"`
				Token  string    `json:"token"`
				Expire time.Time `json:"expire"`
			}{URL: "https://example.com/default.m3u8"},
			IsDefault: true,
			IsVisible: true,
		},
	}
	require.Equal(t, "https://example.com/default.m3u8", remoteVideoOpenURL(file))
}
// TestResolveOpenTargetForVideoPrefersRemoteURL: with no media streams set,
// resolveOpenTarget resolves a video file to its direct download link.
// NOTE(review): despite the test name, no Medias entries are populated here,
// so this asserts the octet-stream fallback rather than a stream preference
// — confirm against resolveOpenTarget's implementation.
func TestResolveOpenTargetForVideoPrefersRemoteURL(t *testing.T) {
	file := &api.File{}
	file.Name = "movie.mkv"
	file.Links.ApplicationOctetStream.URL = "https://example.com/download.mp4"
	target, err := resolveOpenTarget(file)
	require.NoError(t, err)
	require.Equal(t, "https://example.com/download.mp4", target)
}
================================================
FILE: internal/utils/format.go
================================================
package utils
import "strconv"
// storageUnits lists the size suffixes from bytes up to petabytes.
var storageUnits = [...]string{"B", "KB", "MB", "GB", "TB", "PB"}

// FormatStorage renders sizeText (a byte count) in human-readable form when
// human is true. Non-numeric input, or human == false, returns the input
// unchanged. Whole values drop the fraction; others keep two decimals.
func FormatStorage(sizeText string, human bool) string {
	if !human {
		return sizeText
	}
	size, err := strconv.ParseFloat(sizeText, 64)
	if err != nil {
		return sizeText
	}
	unitIdx := 0
	for size >= 1024 && unitIdx < len(storageUnits)-1 {
		size /= 1024
		unitIdx++
	}
	precision := 2
	if size == float64(int64(size)) {
		precision = 0
	}
	return strconv.FormatFloat(size, 'f', precision, 64) + storageUnits[unitIdx]
}
================================================
FILE: internal/utils/format_test.go
================================================
package utils
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestFormatStorage covers raw passthrough, whole and fractional
// human-readable conversion, and non-numeric input.
func TestFormatStorage(t *testing.T) {
	cases := []struct {
		in    string
		human bool
		want  string
	}{
		{"2048", false, "2048"},
		{"2048", true, "2KB"},
		{"1536", true, "1.50KB"},
		{"bad", true, "bad"},
		{"1024", true, "1KB"},
	}
	for _, c := range cases {
		assert.Equal(t, c.want, FormatStorage(c.in, c.human))
	}
}
================================================
FILE: internal/utils/path.go
================================================
package utils
import (
"io/fs"
"os"
"path/filepath"
"regexp"
"strings"
)
func ExpandLocalPath(path string) string {
path = strings.TrimSpace(path)
if path == "" {
return path
}
path = os.ExpandEnv(path)
if path == "~" {
home, err := os.UserHomeDir()
if err == nil {
return home
}
return path
}
prefix := "~" + string(filepath.Separator)
if strings.HasPrefix(path, prefix) {
home, err := os.UserHomeDir()
if err == nil {
return filepath.Join(home, path[len(prefix):])
}
}
return path
}
func SplitSeparator(path string) []string {
if path == "" {
return []string{}
}
return strings.Split(path, string(filepath.Separator))
}
func Slash(path string) string {
// clean path
path = filepath.Clean(path)
if path == "" {
return ""
}
if path[0] == filepath.Separator {
return path[1:]
}
return path
}
// SplitRemotePath splits a remote path into its parent directory (in Slash
// form, without a leading separator) and base name. Root, empty, and "."
// paths yield ("", "").
func SplitRemotePath(path string) (dir string, name string) {
	cleaned := filepath.Clean(path)
	sep := string(filepath.Separator)
	if cleaned == "." || cleaned == sep {
		return "", ""
	}
	name = filepath.Base(cleaned)
	if name == "." || name == sep {
		return "", ""
	}
	if dir = filepath.Dir(cleaned); dir == "." {
		dir = ""
	}
	if dir == "" {
		return "", name
	}
	return Slash(dir), name
}
// 获取目录文件夹下的所有文件路径名
func GetUploadFilePath(basePath string, defaultRegexp []*regexp.Regexp) ([]string, error) {
rawPath := make([]string, 0)
err := filepath.WalkDir(basePath, func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
// match regexp
// if matched, then skip
// else append
matchRegexp := func(name string) bool {
for _, r := range defaultRegexp {
if r.MatchString(name) {
return true
}
}
return false
}
if matchRegexp(d.Name()) {
if d.IsDir() {
return filepath.SkipDir
}
return nil
}
// skip dir
if d.IsDir() {
return nil
}
// get relative path
refPath, err := filepath.Rel(basePath, path)
if err != nil {
return err
}
// append to rawPath
rawPath = append(rawPath, refPath)
return nil
})
return rawPath, err
}
// 检查路径是否存在
func Exists(path string) (bool, error) {
_, err := os.Stat(path)
if err == nil {
return true, nil
}
if os.IsNotExist(err) {
return false, nil
}
return false, err
}
// CreateDirIfNotExist creates path (including parents) when it does not
// exist yet; an existing path is left untouched.
func CreateDirIfNotExist(path string) error {
	exist, err := Exists(path)
	switch {
	case err != nil:
		return err
	case exist:
		return nil
	}
	return os.MkdirAll(path, os.ModePerm)
}
// TouchFile creates an empty file at path unless something already exists
// there.
func TouchFile(path string) error {
	exist, err := Exists(path)
	if err != nil || exist {
		return err
	}
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	f.Close()
	return nil
}
================================================
FILE: internal/utils/path_test.go
================================================
package utils
import (
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
)
// TestSplitRemotePath checks dir/name splitting of remote paths: absolute
// and relative nested paths, a bare file name (empty dir), and the root
// path (both parts empty).
func TestSplitRemotePath(t *testing.T) {
	separator := string(filepath.Separator)
	tests := []struct {
		name     string
		input    string
		wantDir  string
		wantName string
	}{
		{
			name:     "full path",
			input:    separator + filepath.Join("Movies", "Peppa_Pig.mp4"),
			wantDir:  "Movies",
			wantName: "Peppa_Pig.mp4",
		},
		{
			name:     "relative nested path",
			input:    filepath.Join("Movies", "Kids", "Peppa_Pig.mp4"),
			wantDir:  filepath.Join("Movies", "Kids"),
			wantName: "Peppa_Pig.mp4",
		},
		{
			name:     "file name only",
			input:    "Peppa_Pig.mp4",
			wantDir:  "",
			wantName: "Peppa_Pig.mp4",
		},
		{
			name:     "root path",
			input:    separator,
			wantDir:  "",
			wantName: "",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			dir, name := SplitRemotePath(tt.input)
			require.Equal(t, tt.wantDir, dir)
			require.Equal(t, tt.wantName, name)
		})
	}
}
// TestExpandLocalPath covers home-directory and environment-variable
// expansion plus passthrough of relative paths.
func TestExpandLocalPath(t *testing.T) {
	home, err := os.UserHomeDir()
	require.NoError(t, err)
	downloads := filepath.Join(home, "Downloads")
	require.Equal(t, home, ExpandLocalPath("~"))
	require.Equal(t, downloads, ExpandLocalPath("~/Downloads"))
	require.Equal(t, downloads, ExpandLocalPath("$HOME/Downloads"))
	require.Equal(t, "relative/path", ExpandLocalPath("relative/path"))
}
================================================
FILE: internal/utils/sync.go
================================================
package utils
import (
"errors"
"io"
"os"
"slices"
"strings"
"unsafe"
"github.com/52funny/pikpakcli/internal/logx"
)
// ErrSyncTxtNotEnable is returned by SyncTxt methods when sync recording is
// disabled (Enable == false).
var ErrSyncTxtNotEnable = errors.New("sync txt is not enable")

// SyncTxt tracks which files have already been synced, persisted as one
// entry per line in a plain text file.
type SyncTxt struct {
	// Enable toggles sync recording; when false the I/O methods return
	// ErrSyncTxtNotEnable.
	Enable bool
	// FileName is the path of the backing record file.
	FileName string
	// alreadySynced holds entries loaded from the file plus any written
	// during this session.
	alreadySynced []string
	// f is the open handle to the record file (nil when disabled).
	f *os.File
}
// NewSyncTxt opens (creating if needed) the sync record file and loads the
// already-synced entries. When enable is false the returned SyncTxt is
// inert and its I/O methods report ErrSyncTxtNotEnable.
func NewSyncTxt(fileName string, enable bool) (sync *SyncTxt, err error) {
	var f *os.File = nil
	var alreadySynced []string
	if enable {
		f, err = os.OpenFile(fileName, os.O_CREATE|os.O_RDWR|os.O_APPEND, 0666)
		if err != nil {
			return nil, err
		}
		bs, err := io.ReadAll(f)
		if err != nil {
			// Return the error instead of os.Exit(1): library code
			// must let the caller decide, and the handle must not leak.
			logx.Warn("sync", "read file error: ", err)
			f.Close()
			return nil, err
		}
		// Trim the trailing newline so the split does not yield an
		// empty final entry.
		alreadySynced = strings.Split(
			strings.TrimRight(unsafe.String(unsafe.SliceData(bs), len(bs)), "\n"),
			"\n",
		)
	}
	return &SyncTxt{
		Enable:        enable,
		FileName:      fileName,
		f:             f,
		alreadySynced: alreadySynced,
	}, nil
}
// Write implements io.Writer: it appends b (newline-terminated) to the sync
// file and records the entry in memory for UnSync. A disabled instance
// returns ErrSyncTxtNotEnable; an empty write is a no-op.
func (s *SyncTxt) Write(b []byte) (n int, err error) {
	if !s.Enable {
		return 0, ErrSyncTxtNotEnable
	}
	// Guard empty input: indexing b[len(b)-1] would panic.
	if len(b) == 0 {
		return 0, nil
	}
	if b[len(b)-1] != '\n' {
		b = append(b, '\n')
	}
	// Remember the entry (without the newline) so UnSync can filter it.
	s.alreadySynced = append(s.alreadySynced, strings.TrimRight(string(b), "\n"))
	return s.f.Write(b)
}
// Close implements io.Closer, closing the backing record file. A disabled
// instance returns ErrSyncTxtNotEnable.
func (s *SyncTxt) Close() error {
	if s.Enable {
		return s.f.Close()
	}
	return ErrSyncTxtNotEnable
}
// WriteString implements io.StringWriter: it appends str
// (newline-terminated) to the sync file and records the entry in memory.
// A disabled instance returns ErrSyncTxtNotEnable; an empty string is a
// no-op.
func (s *SyncTxt) WriteString(str string) (n int, err error) {
	if !s.Enable {
		return 0, ErrSyncTxtNotEnable
	}
	// Guard empty input: indexing str[len(str)-1] would panic.
	if len(str) == 0 {
		return 0, nil
	}
	if str[len(str)-1] != '\n' {
		str += "\n"
	}
	// Remember the entry (without the newline) so UnSync can filter it.
	s.alreadySynced = append(s.alreadySynced, strings.TrimRight(str, "\n"))
	return s.f.WriteString(str)
}
// UnSync filters files down to the ones not yet recorded as synced. When
// sync recording is disabled, the input is returned unchanged.
func (s *SyncTxt) UnSync(files []string) []string {
	if !s.Enable {
		return files
	}
	pending := make([]string, 0, len(files))
	for _, name := range files {
		if !slices.Contains(s.alreadySynced, name) {
			pending = append(pending, name)
		}
	}
	return pending
}
================================================
FILE: main.go
================================================
package main
import "github.com/52funny/pikpakcli/cli"
// main is the entry point of pikpakcli; it delegates to the cobra root
// command in the cli package.
func main() {
	cli.Execute()
}
================================================
FILE: rules/README.md
================================================
# Rubbish Rules
This directory stores text rule files used by the `rubbish` command.
## Files
- `rubbish_rules.txt`: default rubbish matching rules
## Rule Format
- One rule per line
- Empty lines are ignored
- Lines starting with `#` are comments
- Lines starting with `!` are exclude rules
## Examples
```txt
.DS_Store
*.tmp
cache/*.part
!important.tmp
!/System/*
```
## Usage
```bash
pikpakcli rubbish --rules rules/rubbish_rules.txt
pikpakcli rubbish --rules rules/rubbish_rules.txt --delete
```
## Contributions
If you find common rubbish files or directories that should be covered by the default rules, PRs are welcome.
================================================
FILE: rules/rubbish_rules.txt
================================================
# Rubbish match rules for the `rubbish` command.
# Format:
# - one rule per line
# - empty lines are ignored
# - lines starting with # are comments
# - lines starting with ! are exclude rules
#
# Suggested matching behavior:
# - exact names: .DS_Store
# - wildcard names: *.tmp
# - path patterns: cache/*.part
# - exclude rules: !important/*.tmp
#
# Example:
# pikpakcli rubbish --rules rules/rubbish_rules.txt --delete
# macOS metadata
.DS_Store
._*
# Windows metadata
Thumbs.db
desktop.ini
# partial downloads
*.part
*.crdownload
*.download
*.tmp
# editor temp files
*.swp
*.swo
*~
*.bak
# optional examples
# *.ass.tmp
# *.srt.tmp
# exclude rules
!.gitkeep
!.keep
!/System/*
!/Applications/*