Repository: orangewise/s3-zip
Branch: master
Commit: c8c52f68dffa
Files: 24
Total size: 36.9 KB
Directory structure:
gitextract_9j3z6aks/
├── .github/
│ └── workflows/
│ ├── claude-code-review.yml
│ ├── claude.yml
│ ├── npm-publish.yml
│ └── test.yml
├── .gitignore
├── .npmrc
├── LICENSE
├── README.md
├── aws_lambda.md
├── package.json
├── s3-zip.js
└── test/
├── fixtures/
│ ├── empty.txt
│ ├── file.txt
│ └── folder/
│ ├── a/
│ │ └── file.txt
│ └── b/
│ └── file.txt
├── test-coverage-missing-lines.js
├── test-password-protected-duplicate.js
├── test-s3-error-on-stream.js
├── test-s3-password-protected.js
├── test-s3-same-file-alt-names.js
├── test-s3-v2-client-error.js
├── test-s3-zip-alt-names.js
├── test-s3-zip-unique-prefix.js
└── test-s3-zip.js
================================================
FILE CONTENTS
================================================
================================================
FILE: .github/workflows/claude-code-review.yml
================================================
name: Claude Code Review

on:
  pull_request:
    types: [opened, synchronize, ready_for_review, reopened]
    # Optional: Only run on specific file changes
    # paths:
    #   - "src/**/*.ts"
    #   - "src/**/*.tsx"
    #   - "src/**/*.js"
    #   - "src/**/*.jsx"

jobs:
  claude-review:
    # Optional: Filter by PR author
    # if: |
    #   github.event.pull_request.user.login == 'external-contributor' ||
    #   github.event.pull_request.user.login == 'new-developer' ||
    #   github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: read
      issues: read
      id-token: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Run Claude Code Review
        id: claude-review
        uses: anthropics/claude-code-action@v1
        with:
          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
          plugin_marketplaces: 'https://github.com/anthropics/claude-code.git'
          plugins: 'code-review@claude-code-plugins'
          prompt: '/code-review:code-review ${{ github.repository }}/pull/${{ github.event.pull_request.number }}'
          # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
          # or https://code.claude.com/docs/en/cli-reference for available options
================================================
FILE: .github/workflows/claude.yml
================================================
name: Claude Code

on:
  issue_comment:
    types: [created]
  pull_request_review_comment:
    types: [created]
  issues:
    types: [opened, assigned]
  pull_request_review:
    types: [submitted]

jobs:
  claude:
    # Only run when the triggering comment, review, or issue mentions @claude.
    if: |
      (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
      (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: read
      issues: read
      id-token: write
      actions: read # Required for Claude to read CI results on PRs
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Run Claude Code
        id: claude
        uses: anthropics/claude-code-action@v1
        with:
          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
          # This is an optional setting that allows Claude to read CI results on PRs
          additional_permissions: |
            actions: read
          # Optional: Give a custom prompt to Claude. If this is not specified, Claude will perform the instructions specified in the comment that tagged it.
          # prompt: 'Update the pull request description to include a summary of changes.'
          # Optional: Add claude_args to customize behavior and configuration
          # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
          # or https://code.claude.com/docs/en/cli-reference for available options
          # claude_args: '--allowed-tools Bash(gh pr:*)'
================================================
FILE: .github/workflows/npm-publish.yml
================================================
name: Publish to npm

on:
  workflow_dispatch:
  push:
    tags:
      - 'v*.*.*'

jobs:
  publish:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write # needed for `npm publish --provenance` below
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'
          registry-url: 'https://registry.npmjs.org'

      - name: Install dependencies
        run: npm install

      - name: Run tests
        run: npm test

      - name: Publish to npm
        run: npm publish --provenance --access public
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
================================================
FILE: .github/workflows/test.yml
================================================
name: Test

on:
  pull_request:
    branches: [master]
    types: [opened, synchronize, reopened]
  workflow_dispatch:

jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # Run the suite against each supported Node.js major version.
        node-version: [18, 20, 22]
    steps:
      - uses: actions/checkout@v4

      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install dependencies
        run: npm i

      - name: Run tests
        run: npm test
================================================
FILE: .gitignore
================================================
.nyc_output/*
coverage/*
node_modules/*
test/*.zip
test/*.tar
test/*.tar.gz
.idea/
================================================
FILE: .npmrc
================================================
package-lock=false
================================================
FILE: LICENSE
================================================
MIT License
Copyright (c) 2019 Ronald Luitwieler.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
================================================
FILE: README.md
================================================
# s3-zip
[![npm version][npm-badge]][npm-url]
[![Build Status][gh-actions-badge]][gh-actions-url]
[![JavaScript Style Guide](https://cdn.rawgit.com/standard/standard/master/badge.svg)](http://standardjs.com/)
Download selected files from an Amazon S3 bucket as a zip file.
## Install
```
npm install s3-zip
```
## AWS Configuration
Refer to the [AWS SDK][aws-sdk-url] for authenticating to AWS prior to using this plugin.
## Usage
### Zip specific files
```javascript
const fs = require('fs')
const join = require('path').join
const s3Zip = require('s3-zip')
const region = 'bucket-region'
const bucket = 'name-of-s3-bucket'
const folder = 'name-of-bucket-folder/'
const file1 = 'Image A.png'
const file2 = 'Image B.png'
const file3 = 'Image C.png'
const file4 = 'Image D.png'
const output = fs.createWriteStream(join(__dirname, 'use-s3-zip.zip'))
s3Zip
.archive({ region: region, bucket: bucket}, folder, [file1, file2, file3, file4])
.pipe(output)
```
You can also pass a custom S3 client. For example if you want to zip files from a S3 compatible storage:
```javascript
const { S3Client } = require('@aws-sdk/client-s3')
const s3Client = new S3Client({
region: 'us-east-1',
endpoint: 'http://localhost:9000',
forcePathStyle: true
})
s3Zip
.archive({ s3: s3Client, bucket: bucket }, folder, [file1, file2])
.pipe(output)
```
**Note:** When passing a custom S3 client, it must be an AWS SDK v3 client (from `@aws-sdk/client-s3`). AWS SDK v2 clients are not supported and will result in an error.
### Zip files with AWS Lambda
Example of s3-zip in combination with [AWS Lambda](aws_lambda.md).
### Zip a whole bucket folder
```javascript
const fs = require('fs')
const join = require('path').join
const {
S3Client
} = require("@aws-sdk/client-s3")
const s3Zip = require('s3-zip')
const XmlStream = require('xml-stream')
const region = 'bucket-region'
const bucket = 'name-of-s3-bucket'
const folder = 'name-of-bucket-folder/'
const s3 = new S3Client({ region: region })
const params = {
Bucket: bucket,
Prefix: folder
}
const filesArray = []
const files = s3.listObjects(params).createReadStream()
const xml = new XmlStream(files)
xml.collect('Key')
xml.on('endElement: Key', function(item) {
filesArray.push(item['$text'].substring(folder.length))
})
xml
.on('end', function () {
zip(filesArray)
})
function zip(files) {
console.log(files)
const output = fs.createWriteStream(join(__dirname, 'use-s3-zip.zip'))
s3Zip
.archive({ region: region, bucket: bucket, preserveFolderStructure: true }, folder, files)
.pipe(output)
}
```
### Tar format support
```javascript
s3Zip
.setFormat('tar')
.archive({ region: region, bucket: bucket }, folder, [file1, file2])
.pipe(output)
```
### Zip a file with protected password
```javascript
s3Zip
.setRegisterFormatOptions('zip-encrypted', require("archiver-zip-encrypted"))
.setFormat('zip-encrypted')
.setArchiverOptions({zlib: {level: 8}, encryptionMethod: 'aes256', password: '123'})
.archive({ region: region, bucket: bucket }, folder, [file1, file2])
.pipe(output)
```
### Archiver options
We use [archiver][archiver-url] to create archives. To pass your options to it, use `setArchiverOptions` method:
```javascript
s3Zip
.setFormat('tar')
.setArchiverOptions({ gzip: true })
.archive({ region: region, bucket: bucket }, folder, [file1, file2])
```
### Organize your archive with custom paths and permissions
You can pass an array of objects with type [EntryData][entrydata-url] to organize your archive.
```javascript
const files = ['flower.jpg', 'road.jpg']
const archiveFiles = [
{ name: 'newFolder/flower.jpg' },
/* _rw_______ */
{ name: 'road.jpg', mode: parseInt('0600', 8) }
];
s3Zip.archive({ region: region, bucket: bucket }, folder, files, archiveFiles)
```
### Using with ExpressJS
`s3-zip` works with any framework which leverages on NodeJS Streams including ExpressJS.
```javascript
const s3Zip = require('s3-zip')
app.get('/download', (req, res) => {
s3Zip
.archive({ region: region, bucket: bucket }, '', 'abc.jpg')
.pipe(res)
})
```
Above should stream out the file in the response of the request.
### Debug mode
Enable debug mode to see the logs:
```javascript
s3Zip.archive({ region: region, bucket: bucket, debug: true }, folder, files)
```
## Testing
Tests are written in Node Tap, run them like this:
```
npm t
```
If you would like a more fancy coverage report:
```
npm run coverage
```
## Publishing
This package is automatically published to NPM when a version tag (e.g. `v1.2.3`) is pushed to GitHub. The publishing workflow:
1. Triggers when a `v*.*.*` tag is pushed (and can also be run manually via workflow_dispatch)
2. Runs tests to ensure quality
3. Publishes to NPM using the `NPM_TOKEN` secret
### Setup for Maintainers
To enable automatic publishing, the repository requires an `NPM_TOKEN` secret to be configured in GitHub:
1. Generate an NPM access token with publish permissions
2. Add it as a repository secret named `NPM_TOKEN` in GitHub Settings > Secrets and variables > Actions
The workflow can also be triggered manually from the Actions tab for testing purposes.
[aws-sdk-url]: https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/configuring-the-jssdk.html
[npm-badge]: https://badge.fury.io/js/s3-zip.svg
[npm-url]: https://badge.fury.io/js/s3-zip
[gh-actions-badge]: https://github.com/orangewise/s3-zip/actions/workflows/test.yml/badge.svg?branch=master
[gh-actions-url]: https://github.com/orangewise/s3-zip/actions/workflows/test.yml
[archiver-url]: https://www.npmjs.com/package/archiver
[entrydata-url]: https://archiverjs.com/docs/global.html#EntryData
================================================
FILE: aws_lambda.md
================================================
# Using s3-zip in combination with AWS Lambda
## Create a lambda function
```javascript
const { Upload } = require("@aws-sdk/lib-storage");
const { S3 } = require("@aws-sdk/client-s3");
const s3Zip = require('s3-zip')
const {Readable} = require('stream')
exports.handler = function (event, context) {
console.log('event', event)
const region = event.region
const bucket = event.bucket
const folder = event.folder
const files = event.files
const zipFileName = event.zipFileName
// Create body stream
try {
const writable = s3Zip.archive({ region: region, bucket: bucket}, folder, files)
const body = Readable.from(writable)
const zipParams = { params: { Bucket: bucket, Key: folder + zipFileName } }
const zipFile = new S3(zipParams)
new Upload({
client: zipFile,
params: { Body: body }
})
.on('httpUploadProgress', function (evt) { console.log(evt) })
.done().then(
(r) => {
console.log(r)
context.succeed(r)
},
(e) => {
console.log('zipFile.upload error', e)
context.fail(e)
}
)
} catch (e) {
const err = 'caught error: ' + e
console.log(err)
context.fail(err)
}
}
```
## Invoke the function
```javascript
const { LambdaClient } = require("@aws-sdk/client-lambda");
const region = 'bucket-region'
const bucket = 'name-of-s3-bucket'
const folder = 'name-of-bucket-folder/'
const file1 = 'Image A.png'
const file2 = 'Image B.png'
const file3 = 'Image C.png'
const file4 = 'Image D.png'
const lambda = new LambdaClient({
region
})
const files = [file1, file2, file3, file4]
const payload = JSON.stringify({
'region' : region,
'bucket' : bucket,
'folder' : folder,
'files' : files,
'zipFileName': 'bla.zip'
})
const params = {
FunctionName : 'NAME_OF_YOUR_LAMBDA_FUNCTION',
Payload : payload
}
lambda.invoke(params, function (err, data) {
if (err) console.log(err, err.stack) // an error occurred
else console.log(data) // successful response
})
```
================================================
FILE: package.json
================================================
{
"name": "s3-zip",
"version": "3.3.3",
"description": "Download selected files from an Amazon S3 bucket as a zip file.",
"main": "s3-zip.js",
"engines": {
"node": ">=18"
},
"scripts": {
"test": "standard && tap --cov test/test*.js",
"coverage": "npm test -- --cov --coverage-report=lcov"
},
"repository": {
"type": "git",
"url": "git+https://github.com/orangewise/s3-zip.git"
},
"keywords": [
"amazon",
"aws",
"S3",
"zip",
"files"
],
"author": "Ronald Luitwieler",
"license": "MIT",
"bugs": {
"url": "https://github.com/orangewise/s3-zip/issues"
},
"homepage": "https://github.com/orangewise/s3-zip#readme",
"dependencies": {
"archiver": "^6.0.1",
"normalize-path": "^3.0.0",
"s3-files": "^3.0.0"
},
"devDependencies": {
"archiver-zip-encryptable": "^1.0.5",
"concat-stream": "^2.0.0",
"proxyquire": "^2.1.3",
"sinon": "^16.0.0",
"standard": "^17.1.0",
"stream-array": "^1.1.2",
"tap": "^16.3.8",
"tar": "^6.2.0",
"yauzl": "^2.10.0"
}
}
================================================
FILE: s3-zip.js
================================================
const s3Files = require('s3-files')
const archiver = require('archiver')
const s3Zip = {}
module.exports = s3Zip
// Stream the given S3 objects into a new archive.
//
// opts     - connection options: either { region } or { s3: <SDK v3 client> },
//            plus `bucket`, and optional `debug` / `preserveFolderStructure`.
// folder   - key prefix under which the objects live in the bucket.
// filesS3  - array of object keys (relative to `folder`) to fetch.
// filesZip - optional array of names/EntryData objects used inside the archive.
//
// Returns the archiver stream produced by archiveStream().
s3Zip.archive = function (opts, folder, filesS3, filesZip) {
  const self = this
  self.folder = folder
  self.debug = opts.debug || false

  let connectionConfig
  if ('s3' in opts) {
    // Only AWS SDK v3 clients (which expose .send) are supported.
    if (!opts.s3 || typeof opts.s3.send !== 'function') {
      throw new Error('The provided S3 client must be an AWS SDK v3 client with a .send() method. ' +
        'Please use @aws-sdk/client-s3 (v3) instead of aws-sdk (v2).')
    }
    connectionConfig = { s3: opts.s3 }
  } else {
    connectionConfig = { region: opts.region }
  }
  connectionConfig.bucket = opts.bucket

  self.client = s3Files.connect(connectionConfig)
  const keyStream = self.client.createKeyStream(folder, filesS3)

  // Folder structure is kept either when explicitly requested or when the
  // caller remaps entries via filesZip (remapping needs the full paths).
  const keepFolders = opts.preserveFolderStructure === true || filesZip
  const fileStream = s3Files.createFileStream(keyStream, keepFolders)
  return self.archiveStream(fileStream, filesS3, filesZip)
}
// Consume a stream of { data, path } file objects and append each one to a
// new archiver stream, which is returned immediately.
//
// stream   - readable emitting { data: Buffer, path: string } per file.
// filesS3  - the S3 keys requested (used to map entries onto filesZip).
// filesZip - optional array of alternate names/EntryData; filesS3[i] is
//            stored in the archive as filesZip[i].
//
// Returns the archiver instance; callers pipe it to their destination.
s3Zip.archiveStream = function (stream, filesS3, filesZip) {
  const self = this
  const folder = this.folder || ''
  if (this.registerFormat) {
    // Only register the format if it hasn't been registered before
    if (!archiver.isRegisteredFormat(this.registerFormat)) {
      archiver.registerFormat(this.registerFormat, this.formatModule)
    }
  }
  const archive = archiver(this.format || 'zip', this.archiverOpts || {})
  archive.on('error', function (err) {
    self.debug && console.log('archive error', err)
  })
  stream
    .on('data', function (file) {
      // Keys ending in '/' are folder placeholder objects; skip them.
      if (file.path[file.path.length - 1] === '/') {
        self.debug && console.log('don\'t append to zip', file.path)
        return
      }
      let fname
      if (filesZip) {
        // Place files_s3[i] into the archive as files_zip[i]
        const i = filesS3.indexOf(file.path.startsWith(folder) ? file.path.substring(folder.length) : file.path)
        fname = (i >= 0 && i < filesZip.length) ? filesZip[i] : file.path
        if (i >= 0) {
          // Blank out the consumed entry so a key listed multiple times maps
          // to the next alternate name. Guarding on i >= 0 fixes a bug where
          // an unmatched path assigned filesS3[-1] = '', creating a bogus
          // '-1' property on the caller's array.
          filesS3[i] = ''
        }
      } else {
        // Just use the S3 file name
        fname = file.path
      }
      const entryData = typeof fname === 'object' ? fname : { name: fname }
      self.debug && console.log('append to zip', fname)
      // Zero-length objects are appended as an empty string (original
      // behavior preserved for empty S3 files).
      if (file.data.length === 0) {
        archive.append('', entryData)
      } else {
        archive.append(file.data, entryData)
      }
    })
    .on('end', function () {
      self.debug && console.log('end -> finalize')
      archive.finalize()
    })
    .on('error', function (err) {
      // Forward upstream failures so consumers of the archive see them.
      archive.emit('error', err)
    })
  return archive
}
// Select the archiver output format (e.g. 'zip' or 'tar').
// Chainable: returns the module so calls can precede .archive().
s3Zip.setFormat = function (format) {
  this.format = format
  return this
}
// Store options passed straight through to the archiver constructor
// (e.g. { zlib: { level: 8 }, password: '...' }). Chainable.
s3Zip.setArchiverOptions = function (archiverOpts) {
  this.archiverOpts = archiverOpts
  return this
}
// Record a custom archiver format to register lazily in archiveStream()
// (e.g. 'zip-encryptable' with the archiver-zip-encryptable module). Chainable.
s3Zip.setRegisterFormatOptions = function (registerFormat, formatModule) {
  this.registerFormat = registerFormat
  this.formatModule = formatModule
  return this
}
================================================
FILE: test/fixtures/empty.txt
================================================
================================================
FILE: test/fixtures/file.txt
================================================
Howdy.
Howdy.
Howdy.
================================================
FILE: test/fixtures/folder/a/file.txt
================================================
a
================================================
FILE: test/fixtures/folder/b/file.txt
================================================
b
================================================
FILE: test/test-coverage-missing-lines.js
================================================
// Test to cover missing lines for full coverage
const t = require('tap')
const Stream = require('stream')
const sinon = require('sinon')
const proxyquire = require('proxyquire')

// Mock s3Files to avoid real S3 calls
const mockS3Files = {
  connect: sinon.stub().returns({
    createKeyStream: sinon.stub().returns(new Stream())
  }),
  createFileStream: sinon.stub().returns(new Stream())
}

const s3Zip = proxyquire('../s3-zip.js', {
  's3-files': mockS3Files
})

t.test('test archive with valid AWS SDK v3 client', function (child) {
  // Mock valid AWS SDK v3 client (has .send method)
  const awsV3Client = {
    send: function (command) {
      return Promise.resolve({ Body: Buffer.from('test data') })
    }
  }
  try {
    // This should work and cover line 21: connectionConfig = { s3: opts.s3 }
    const archive = s3Zip.archive(
      { s3: awsV3Client, bucket: 'test-bucket' },
      'folder/',
      ['test-file.txt']
    )
    child.type(archive, 'object', 'Should return archive object')
    child.end()
  } catch (error) {
    child.fail(`Should not throw error with valid SDK v3 client: ${error.message}`)
    child.end()
  }
})

t.test('test archiveStream with debug mode and directory paths', function (child) {
  const rs = new Stream()
  rs.readable = true
  // Enable debug mode to cover lines 59-60
  s3Zip.debug = true
  const archive = s3Zip.archiveStream(rs, [])
  // Emit a directory path (trailing '/') to trigger the skip + debug log
  rs.emit('data', { data: Buffer.alloc(0), path: 'test-folder/' })
  rs.emit('end')
  child.type(archive, 'object', 'Should return archive object')
  child.end()
})

t.test('test archiveStream with file not in filesS3 array', function (child) {
  const rs = new Stream()
  rs.readable = true
  // Test the ternary on line 66 - when file is not found in filesS3 array
  const archive = s3Zip.archiveStream(rs, ['different-file.txt'], ['renamed.txt'])
  // Emit a file that's not in the filesS3 array
  rs.emit('data', {
    data: Buffer.from('test content'),
    path: 'not-in-list.txt'
  })
  rs.emit('end')
  child.type(archive, 'object', 'Should return archive object')
  child.end()
})

t.test('test archiveStream with debug and archive error', function (child) {
  const rs = new Stream()
  rs.readable = true
  // Enable debug mode to cover line 54
  s3Zip.debug = true
  const archive = s3Zip.archiveStream(rs, [])
  // Force an archive error to trigger the debug log on line 54
  setImmediate(() => {
    archive.emit('error', new Error('test error'))
  })
  rs.emit('end')
  child.type(archive, 'object', 'Should return archive object')
  child.end()
})
================================================
FILE: test/test-password-protected-duplicate.js
================================================
// Regression test: calling archiveStream twice with the same registered
// custom format must not throw on duplicate registration.
const s3Zip = require('../s3-zip.js')
const t = require('tap')
const archiverZipEncryptable = require('archiver-zip-encryptable')

t.test('test duplicate format registration does not error', function (child) {
  // First registration should work
  s3Zip.setRegisterFormatOptions('zip-encryptable', archiverZipEncryptable)

  // Minimal stream stub: fires 'end' asynchronously, never emits data/errors.
  const mockStream = {
    on: function (event, callback) {
      if (event === 'end') {
        setTimeout(callback, 10) // Simulate async behavior
      } else if (event === 'data') {
        // Don't emit any data
      } else if (event === 'error') {
        // Don't emit any errors
      }
      return this
    }
  }

  try {
    // First call to archiveStream (should register format)
    const archive1 = s3Zip.archiveStream(mockStream, [], [])
    child.ok(archive1, 'First archiveStream call succeeded')

    // Second call to archiveStream (should NOT fail due to duplicate registration)
    const archive2 = s3Zip.archiveStream(mockStream, [], [])
    child.ok(archive2, 'Second archiveStream call succeeded')
    child.end()
  } catch (err) {
    child.fail(`archiveStream calls failed: ${err.message}`)
    child.end()
  }
})
================================================
FILE: test/test-s3-error-on-stream.js
================================================
// Verify that errors emitted by the source file stream don't crash s3-zip,
// for both archiveStream() directly and archive() with a stubbed s3-files.
let s3Zip = require('../s3-zip.js')
const t = require('tap')
const fs = require('fs')
const Stream = require('stream')
const concat = require('concat-stream')
const join = require('path').join
const proxyquire = require('proxyquire')
const sinon = require('sinon')

// Readable stub: emits an error once the fixture is fully buffered, then
// 'end' when the underlying file stream finishes.
const fileStream = function (file) {
  const rs = new Stream()
  rs.readable = true
  const fileStream = fs.createReadStream(join(__dirname, file))
  fileStream
    .pipe(concat(
      function buffersEmit (buffer) {
        rs.emit('error', new Error())
      })
    )
  fileStream
    .on('end', function () {
      console.log('end fileStream')
      rs.emit('end')
    })
  return rs
}

t.test('test if error on filestream with archiveStream', function (child) {
  const stream = fileStream('./fixtures/file.txt')
  const files = ['foo.png']
  s3Zip.archiveStream(stream, files)
  child.end()
})

t.test('test if error on filestream with archive', function (child) {
  const stream = fileStream('./fixtures/file.txt')
  // Stub s3-files so archive() consumes the erroring stream instead of S3.
  s3Zip = proxyquire('../s3-zip.js', {
    's3-files': { createFileStream: sinon.stub().returns(stream) }
  })
  const files = ['foo.png']
  s3Zip.archive({ region: 'region', bucket: 'bucket' }, 'folder', files)
  child.end()
})
================================================
FILE: test/test-s3-password-protected.js
================================================
// End-to-end test: build a password-protected zip via the zip-encryptable
// format, then verify it can be extracted with the password using `unzip`.
let s3Zip = require('../s3-zip.js')
const t = require('tap')
const fs = require('fs')
const Stream = require('stream')
const concat = require('concat-stream')
const join = require('path').join
const streamify = require('stream-array')
const archiverZipEncryptable = require('archiver-zip-encryptable')
const { exec } = require('child_process')

// Readable stub: emits { data, path } for each fixture file and 'end' once
// every file has been buffered.
const fileStreamForFiles = function (files, preserveFolderPath) {
  const rs = new Stream()
  rs.readable = true
  let fileCounter = 0
  streamify(files).on('data', function (file) {
    fileCounter += 1
    const fileStream = fs.createReadStream(join(__dirname, file))
    fileStream.pipe(
      concat(function buffersEmit (buffer) {
        // console.log('buffers concatenated, emit data for ', file);
        const path = preserveFolderPath ? file : file.replace(/^.*[\\/]/, '')
        rs.emit('data', { data: buffer, path })
      })
    )
    fileStream.on('end', function () {
      fileCounter -= 1
      if (fileCounter < 1) {
        // console.log('all files processed, emit end');
        rs.emit('end')
      }
    })
  })
  return rs
}

const file1 = 'a/file.txt'
const file2 = 'b/file.txt'

const sinon = require('sinon')
const proxyquire = require('proxyquire')
const s3Stub = fileStreamForFiles(
  ['/fixtures/folder/a/file.txt', '/fixtures/folder/b/file.txt'],
  true
)
// Stub out s3-files so no real S3 access happens.
s3Zip = proxyquire('../s3-zip.js', {
  's3-files': { createFileStream: sinon.stub().returns(s3Stub) }
})

t.test('test archive password protected', async child => {
  const outputPath = join(__dirname, '/test-password-protected.zip')
  const output = fs.createWriteStream(outputPath)
  await s3Zip
    .setRegisterFormatOptions('zip-encryptable', archiverZipEncryptable)
    .setFormat('zip-encryptable')
    .setArchiverOptions({
      zlib: { level: 8 },
      forceLocalTime: true,
      password: 'test'
    })
    .archive({ region: 'region', bucket: 'bucket' }, '/fixtures/folder/', [
      file1,
      file2
    ])
    .pipe(output)
    .on('finish', async () => {
      // Use the system unzip tool to confirm the password actually works.
      exec(
        `unzip -P test ${outputPath} -d ${outputPath}/../testUnzipped/`,
        () => {
          if (
            fs.existsSync(
              `${outputPath}/../testUnzipped/fixtures/folder/a/file.txt`
            )
          ) {
            child.ok(true, 'file exist after unzip')
          }
        }
      )
    })
})
================================================
FILE: test/test-s3-same-file-alt-names.js
================================================
// Test s3-zip BUT using alternate file names for the same file which is listed multiple times
const s3Zip = require('../s3-zip.js')
const t = require('tap')
const fs = require('fs')
const Stream = require('stream')
const concat = require('concat-stream')
const join = require('path').join
const streamify = require('stream-array')
const tar = require('tar')
const sinon = require('sinon')
const proxyquire = require('proxyquire')

// Readable stub: emits { data, path } for each fixture file and 'end' once
// every file has been buffered.
const fileStreamForFiles = function (files, preserveFolderPath) {
  const rs = new Stream()
  rs.readable = true
  let fileCounter = 0
  streamify(files).on('data', function (file) {
    fileCounter += 1
    const fileStream = fs.createReadStream(join(__dirname, file))
    fileStream.pipe(
      concat(function buffersEmit (buffer) {
        // console.log('buffers concatenated, emit data for ', file);
        const path = preserveFolderPath ? file : file.replace(/^.*[\\/]/, '')
        rs.emit('data', { data: buffer, path })
      })
    )
    fileStream.on('end', function () {
      fileCounter -= 1
      if (fileCounter < 1) {
        // console.log('all files processed, emit end');
        rs.emit('end')
      }
    })
  })
  return rs
}

const outputFiles = [
  'FILE_1_ALT_1.TXT',
  'FILE_1_ALT_2.TXT'
]
const filesRead = []

t.test('test a tar archive with alternate names for one file listed many times', function (child) {
  // The same fixture key appears twice; each occurrence should map to the
  // next alternate name in outputFiles.
  const inputFiles = [
    '/fixtures/folder/a/file.txt',
    '/fixtures/folder/a/file.txt'
  ]
  const outputPath = join(__dirname, '/test-same_file_alt_name.tar')
  const output = fs.createWriteStream(outputPath)
  const archive = s3Zip
    .setFormat('tar')
    .archiveStream(fileStreamForFiles(inputFiles, true), inputFiles, outputFiles)
    .pipe(output)
  archive.on('close', function () {
    // List the tar entries and check both alternate names were written.
    fs.createReadStream(outputPath)
      .pipe(tar.list())
      .on('entry', function (entry) {
        filesRead.push(entry.path)
      })
      .on('end', function () {
        child.same(filesRead, outputFiles)
        child.end()
      })
  })
})

t.test('test archive with alternate names for one file listed many times', function (child) {
  const inputFiles = [
    '/fixtures/folder/a/file.txt',
    '/fixtures/folder/a/file.txt'
  ]
  const s3Zip = proxyquire('../s3-zip.js', {
    's3-files': { createFileStream: sinon.stub().returns(fileStreamForFiles(inputFiles, true)) }
  })
  const archive = s3Zip
    .archive({ region: 'region', bucket: 'bucket' },
      '',
      inputFiles,
      outputFiles.map(file => {
        return { name: file }
      })
    )
  child.type(archive, 'object')
  child.end()
})
================================================
FILE: test/test-s3-v2-client-error.js
================================================
// Test s3-zip with AWS SDK v2 client which should produce clear error
const s3Zip = require('../s3-zip.js')
const t = require('tap')
t.test('test s3-zip with AWS SDK v2 client should fail with clear error', function (child) {
// Mock AWS SDK v2 client (no .send method)
const awsV2Client = {
getObject: function (params) {
return {
promise: function () {
return Promise.resolve({ Body: Buffer.from('test data') })
}
}
}
}
try {
// This should fail with a clear error message about AWS SDK compatibility
s3Zip.archive(
{ s3: awsV2Client, bucket: 'test-bucket' },
'folder/',
['test-file.txt']
)
// If we get here without an error, the test failed
child.fail('Expected an error about AWS SDK compatibility but none was thrown')
child.end()
} catch (error) {
// We should get a clear error about AWS SDK version compatibility
child.ok(error.message.includes('AWS SDK v3'), 'Should get clear error about AWS SDK v3 requirement')
child.ok(error.message.includes('@aws-sdk/client-s3'), 'Should mention the correct package')
child.end()
}
})
t.test('test s3-zip with null s3 client should fail with clear error', function (child) {
try {
// This should fail with a clear error message about AWS SDK compatibility
s3Zip.archive(
{ s3: null, bucket: 'test-bucket' },
'folder/',
['test-file.txt']
)
// If we get here without an error, the test failed
child.fail('Expected an error about AWS SDK compatibility but none was thrown')
child.end()
} catch (error) {
// We should get a clear error about AWS SDK version compatibility
child.ok(error.message.includes('AWS SDK v3'), 'Should get clear error about AWS SDK v3 requirement')
child.ok(error.message.includes('.send() method'), 'Should mention the .send() method requirement')
child.end()
}
})
================================================
FILE: test/test-s3-zip-alt-names.js
================================================
// Test s3-zip BUT using alternate file names in the resulting zip archive
let s3Zip = require('../s3-zip.js')
const t = require('tap')
const fs = require('fs')
const Stream = require('stream')
const concat = require('concat-stream')
const yauzl = require('yauzl')
const join = require('path').join
const tar = require('tar')
const fileStream = function (file, forceError) {
const rs = new Stream()
rs.readable = true
const fileStream = fs.createReadStream(join(__dirname, file))
fileStream
.pipe(concat(
function buffersEmit (buffer) {
if (forceError) {
console.log('send end to finalize archive')
rs.emit('end')
} else {
rs.emit('data', { data: buffer, path: file })
}
})
)
fileStream
.on('end', function () {
console.log('end fileStream')
rs.emit('end')
})
return rs
}
const file1 = '/fixtures/file.txt'
const file1Alt = 'FILE_ALT.TXT'
const file1DataEntry = { name: file1Alt, mode: parseInt('0600', 8) }
// Stub: var fileStream = s3Files.createFileStream(keyStream);
const sinon = require('sinon')
const proxyquire = require('proxyquire')
const s3Stub = fileStream(file1)
s3Zip = proxyquire('../s3-zip.js', {
's3-files': { createFileStream: sinon.stub().returns(s3Stub) }
})
// End-to-end: pipe the stubbed file stream through archiveStream with an
// alternate entry name, then reopen the written zip with yauzl and verify
// the entry carries the alternate name and the expected sizes.
t.test('test archiveStream and zip file with alternate file name in zip archive', function (child) {
  const output = fs.createWriteStream(join(__dirname, '/test-alt.zip'))
  const s = fileStream(file1)
  const archive = s3Zip
    .archiveStream(s, [file1], [file1Alt])
    .pipe(output)
  archive.on('close', function () {
    console.log('+++++++++++')
    yauzl.open(join(__dirname, '/test-alt.zip'), function (err, zip) {
      if (err) console.log('err', err)
      zip.on('entry', function (entry) {
        // console.log(entry);
        // Entry name must be the alternate name, not the original key.
        child.same(entry.fileName, file1Alt)
        child.same(entry.compressedSize, 11)
        child.same(entry.uncompressedSize, 20)
      })
      zip.on('close', function () {
        child.end()
      })
    })
  })
  child.type(archive, 'object')
})
// Smoke test only: archive() with alternate names should synchronously
// return a stream-like object; no S3 round trip is exercised here.
t.test('test archive with alternate zip archive names', function (child) {
  const connection = { region: 'region', bucket: 'bucket' }
  const archive = s3Zip.archive(connection, 'folder', [file1], [file1Alt])
  child.type(archive, 'object')
  child.end()
})
// Tar variant: an EntryData object ({ name, mode }) should set both the
// entry path and the file mode in the resulting tar archive, which is
// verified by listing the written tar with the tar package.
t.test('test a tar archive with EntryData object', function (child) {
  const outputPath = join(__dirname, '/test-entrydata.tar')
  const output = fs.createWriteStream(outputPath)
  const archive = s3Zip
    .setFormat('tar')
    .archiveStream(fileStream(file1), [file1], [file1DataEntry])
    .pipe(output)
  archive.on('close', function () {
    fs.createReadStream(outputPath)
      .pipe(tar.list())
      .on('entry', function (entry) {
        child.same(entry.path, file1Alt)
        // Mode comes from the EntryData object (0600 octal).
        child.same(entry.mode, parseInt('0600', 8))
      })
      .on('end', function () {
        child.end()
      })
  })
})
================================================
FILE: test/test-s3-zip-unique-prefix.js
================================================
let s3Zip = require('../s3-zip.js')
const t = require('tap')
const fs = require('fs')
const Stream = require('stream')
const concat = require('concat-stream')
const yauzl = require('yauzl')
const join = require('path').join
const streamify = require('stream-array')
// Fake s3-files stream over several fixture files: each file is buffered
// and replayed as one { data, path } chunk; 'end' fires once every file
// has been fully read. When preserveFolderPath is false the path is
// reduced to its basename.
const fileStreamForFiles = function (files, preserveFolderPath) {
  const out = new Stream()
  out.readable = true
  let pending = 0
  streamify(files).on('data', function (file) {
    pending += 1
    const reader = fs.createReadStream(join(__dirname, file))
    const emitChunk = function (buffer) {
      const path = preserveFolderPath ? file : file.replace(/^.*[\\/]/, '')
      out.emit('data', { data: buffer, path })
    }
    reader.pipe(concat(emitChunk))
    reader.on('end', function () {
      pending -= 1
      // Last reader finished — signal end of the whole pseudo-stream.
      if (pending < 1) {
        out.emit('end')
      }
    })
  })
  return out
}
// The two keys share the same basename; the alternate names keep their
// entries distinguishable inside the zip.
const file1 = 'a/file.txt'
const file1Alt = 'file.txt'
const file2 = 'b/file.txt'
const file2Alt = 'file-1.txt'
const sinon = require('sinon')
const proxyquire = require('proxyquire')
// Stub stream replays both fixtures with their folder paths preserved.
const s3Stub = fileStreamForFiles(
  ['/fixtures/folder/a/file.txt', '/fixtures/folder/b/file.txt'],
  true
)
// Reload s3-zip with s3-files stubbed out so archive() never touches S3.
s3Zip = proxyquire('../s3-zip.js', {
  's3-files': { createFileStream: sinon.stub().returns(s3Stub) }
})
// Two keys with identical basenames must surface under their distinct
// alternate names; each zip entry is checked off against altFiles so a
// duplicate or unexpected entry fails the test.
t.test(
  'test archive with matching alternate zip archive names but unique keys',
  function (child) {
    const outputPath = join(__dirname, '/test-unique.zip')
    const output = fs.createWriteStream(outputPath)
    const archive = s3Zip
      .archive(
        { region: 'region', bucket: 'bucket' },
        '/fixtures/folder/',
        [file1, file2],
        [{ name: file1Alt }, { name: file2Alt }]
      )
      .pipe(output)
    const altFiles = [file1Alt, file2Alt]
    archive.on('close', function () {
      yauzl.open(outputPath, function (err, zip) {
        if (err) console.log('err', err)
        zip.on('entry', function (entry) {
          const i = altFiles.indexOf(entry.fileName)
          if (i > -1) {
            child.same(entry.fileName, altFiles[i])
            // Remove matched names so a duplicate entry would be caught.
            altFiles.splice(i, 1)
          } else {
            child.ok(false, 'File not found in alternate file names list.')
          }
        })
        zip.on('close', function () {
          child.end()
        })
      })
    })
    child.type(archive, 'object')
  }
)
================================================
FILE: test/test-s3-zip.js
================================================
let s3Zip = require('../s3-zip.js')
const t = require('tap')
const fs = require('fs')
const Stream = require('stream')
const concat = require('concat-stream')
const yauzl = require('yauzl')
const { join } = require('path')
// Pseudo-readable stream mimicking s3-files: emits the fixture file as a
// single { data, path } chunk followed by 'end'. With forceError set the
// data chunk is skipped and 'end' is emitted early instead.
const fileStream = function (file, forceError) {
  const out = new Stream()
  out.readable = true
  const reader = fs.createReadStream(join(__dirname, file))
  reader.pipe(concat(function buffered (chunk) {
    if (!forceError) {
      out.emit('data', { data: chunk, path: file })
    } else {
      console.log('send end to finalize archive')
      out.emit('end')
    }
  }))
  reader.on('end', function () {
    console.log('end fileStream')
    out.emit('end')
  })
  return out
}
const file1 = '/fixtures/file.txt'
const emptyFile = '/fixtures/empty.txt'
// Stub: var fileStream = s3Files.createFileStream(keyStream);
const sinon = require('sinon')
const proxyquire = require('proxyquire')
// Pre-built stub stream; s3-zip is reloaded with s3-files stubbed out so
// archive() can be called without real AWS credentials.
const s3Stub = fileStream(file1)
s3Zip = proxyquire('../s3-zip.js', {
  's3-files': { createFileStream: sinon.stub().returns(s3Stub) }
})
// End-to-end: archive the stubbed file stream to test.zip, then reopen it
// with yauzl and verify the single entry's name and sizes.
t.test('test archiveStream and zip file', function (child) {
  const output = fs.createWriteStream(join(__dirname, '/test.zip'))
  const s = fileStream(file1)
  const archive = s3Zip.archiveStream(s).pipe(output)
  archive.on('close', function () {
    console.log('+++++++++++')
    yauzl.open(join(__dirname, '/test.zip'), function (err, zip) {
      if (err) console.log('err', err)
      zip.on('entry', function (entry) {
        // console.log(entry);
        // Leading slash of the key is dropped in the entry name.
        child.same(entry.fileName, 'fixtures/file.txt')
        child.same(entry.compressedSize, 11)
        child.same(entry.uncompressedSize, 20)
      })
      zip.on('close', function () {
        child.end()
      })
    })
  })
  child.type(archive, 'object')
})
// Smoke test: archive() should synchronously hand back a stream-like
// object; the stubbed s3-files means nothing is actually downloaded.
t.test('test archive', function (child) {
  const connection = { region: 'region', bucket: 'bucket' }
  const archive = s3Zip.archive(connection, 'folder', [file1])
  child.type(archive, 'object')
  child.end()
})
// Zero-byte fixture: the archive must still contain a valid entry, with
// both compressed and uncompressed sizes reported as 0.
t.test('test archive on empty file', function (child) {
  const output = fs.createWriteStream(join(__dirname, '/test.zip'))
  const s = fileStream(emptyFile)
  const archive = s3Zip.archiveStream(s).pipe(output)
  archive.on('close', function () {
    console.log('+++++++++++')
    yauzl.open(join(__dirname, '/test.zip'), function (err, zip) {
      if (err) console.log('err', err)
      zip.on('entry', function (entry) {
        // console.log(entry);
        child.same(entry.fileName, 'fixtures/empty.txt')
        child.same(entry.compressedSize, 0)
        child.same(entry.uncompressedSize, 0)
      })
      zip.on('close', function () {
        child.end()
      })
    })
  })
  child.type(archive, 'object')
})
gitextract_9j3z6aks/
├── .github/
│ └── workflows/
│ ├── claude-code-review.yml
│ ├── claude.yml
│ ├── npm-publish.yml
│ └── test.yml
├── .gitignore
├── .npmrc
├── LICENSE
├── README.md
├── aws_lambda.md
├── package.json
├── s3-zip.js
└── test/
├── fixtures/
│ ├── empty.txt
│ ├── file.txt
│ └── folder/
│ ├── a/
│ │ └── file.txt
│ └── b/
│ └── file.txt
├── test-coverage-missing-lines.js
├── test-password-protected-duplicate.js
├── test-s3-error-on-stream.js
├── test-s3-password-protected.js
├── test-s3-same-file-alt-names.js
├── test-s3-v2-client-error.js
├── test-s3-zip-alt-names.js
├── test-s3-zip-unique-prefix.js
└── test-s3-zip.js
Condensed preview — 24 files, each showing its path, character count, and a content snippet. Download the .json file or copy it for the full structured content (41K chars).
[
{
"path": ".github/workflows/claude-code-review.yml",
"chars": 1433,
"preview": "name: Claude Code Review\n\non:\n pull_request:\n types: [opened, synchronize, ready_for_review, reopened]\n # Optiona"
},
{
"path": ".github/workflows/claude.yml",
"chars": 1886,
"preview": "name: Claude Code\n\non:\n issue_comment:\n types: [created]\n pull_request_review_comment:\n types: [created]\n issue"
},
{
"path": ".github/workflows/npm-publish.yml",
"chars": 684,
"preview": "name: Publish to npm\n\non:\n workflow_dispatch:\n push:\n tags:\n - 'v*.*.*'\n\njobs:\n publish:\n runs-on: ubuntu-"
},
{
"path": ".github/workflows/test.yml",
"chars": 527,
"preview": "name: Test\n\non:\n pull_request:\n branches: [master]\n types: [opened, synchronize, reopened]\n workflow_dispatch:\n\n"
},
{
"path": ".gitignore",
"chars": 83,
"preview": ".nyc_output/*\ncoverage/*\nnode_modules/*\ntest/*.zip\ntest/*.tar\ntest/*.tar.gz\n.idea/\n"
},
{
"path": ".npmrc",
"chars": 19,
"preview": "package-lock=false\n"
},
{
"path": "LICENSE",
"chars": 1075,
"preview": "MIT License\n\nCopyright (c) 2019 Ronald Luitwieler.\n\nPermission is hereby granted, free of charge, to any person obtainin"
},
{
"path": "README.md",
"chars": 5645,
"preview": "# s3-zip\n\n[![npm version][npm-badge]][npm-url]\n[![Build Status][gh-actions-badge]][gh-actions-url]\n[![JavaScript Style G"
},
{
"path": "aws_lambda.md",
"chars": 2091,
"preview": "# Using s3-zip in combination with AWS Lambda\n\n## Create a lambda function\n\n\n```javascript\nconst { Upload } = require(\"@"
},
{
"path": "package.json",
"chars": 1072,
"preview": "{\n \"name\": \"s3-zip\",\n \"version\": \"3.3.3\",\n \"description\": \"Download selected files from an Amazon S3 bucket as a zip "
},
{
"path": "s3-zip.js",
"chars": 3093,
"preview": "const s3Files = require('s3-files')\nconst archiver = require('archiver')\n\nconst s3Zip = {}\nmodule.exports = s3Zip\n\ns3Zip"
},
{
"path": "test/fixtures/empty.txt",
"chars": 0,
"preview": ""
},
{
"path": "test/fixtures/file.txt",
"chars": 20,
"preview": "Howdy.\nHowdy.\nHowdy."
},
{
"path": "test/fixtures/folder/a/file.txt",
"chars": 2,
"preview": "a\n"
},
{
"path": "test/fixtures/folder/b/file.txt",
"chars": 2,
"preview": "b\n"
},
{
"path": "test/test-coverage-missing-lines.js",
"chars": 2621,
"preview": "// Test to cover missing lines for full coverage\n\nconst t = require('tap')\nconst Stream = require('stream')\nconst sinon "
},
{
"path": "test/test-password-protected-duplicate.js",
"chars": 1226,
"preview": "const s3Zip = require('../s3-zip.js')\nconst t = require('tap')\nconst archiverZipEncryptable = require('archiver-zip-encr"
},
{
"path": "test/test-s3-error-on-stream.js",
"chars": 1217,
"preview": "let s3Zip = require('../s3-zip.js')\nconst t = require('tap')\nconst fs = require('fs')\nconst Stream = require('stream')\nc"
},
{
"path": "test/test-s3-password-protected.js",
"chars": 2347,
"preview": "let s3Zip = require('../s3-zip.js')\nconst t = require('tap')\nconst fs = require('fs')\nconst Stream = require('stream')\nc"
},
{
"path": "test/test-s3-same-file-alt-names.js",
"chars": 2598,
"preview": "// Test s3-zip BUT using alternate file names for the same file which is listed multiple times\n\nconst s3Zip = require('."
},
{
"path": "test/test-s3-v2-client-error.js",
"chars": 1923,
"preview": "// Test s3-zip with AWS SDK v2 client which should produce clear error\n\nconst s3Zip = require('../s3-zip.js')\nconst t = "
},
{
"path": "test/test-s3-zip-alt-names.js",
"chars": 2958,
"preview": "// Test s3-zip BUT using alternate file names in the resulting zip archive\n\nlet s3Zip = require('../s3-zip.js')\nconst t "
},
{
"path": "test/test-s3-zip-unique-prefix.js",
"chars": 2486,
"preview": "let s3Zip = require('../s3-zip.js')\nconst t = require('tap')\nconst fs = require('fs')\nconst Stream = require('stream')\nc"
},
{
"path": "test/test-s3-zip.js",
"chars": 2746,
"preview": "let s3Zip = require('../s3-zip.js')\nconst t = require('tap')\nconst fs = require('fs')\nconst Stream = require('stream')\nc"
}
]
About this extraction
This page contains the full source code of the orangewise/s3-zip GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 24 files (36.9 KB), approximately 10.5k tokens. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.