Skip to content

Commit

Permalink
Merge pull request #4 from YOU54F/accessLevelControl
Browse files Browse the repository at this point in the history
Feat: Set access control level
  • Loading branch information
ThaNarie authored Jun 9, 2019
2 parents 7433325 + 9ea2eb7 commit 35a0c9f
Show file tree
Hide file tree
Showing 4 changed files with 93 additions and 30 deletions.
9 changes: 6 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,8 @@ Options:
-g, --glob A glob on filename level to filter the files to upload [string] [default: "*.*"]
-a, --cache-control Cache control for uploaded files, can be string for single value or list of glob settings
[string] [default: ""]
-acl, --access-control-level Sets the access control level for uploaded files
[string] [default: "undefined"]
-c, --config The AWS config json path to load S3 credentials with loadFromPath. [string]
-h, --help Show help [boolean]
Expand Down Expand Up @@ -75,7 +77,8 @@ await new Uploader({
'**/settings.json': 'max-age=60', // 1 mins for settings, specific matches should go first
'**/*.json': 'max-age=300', // 5 mins for other jsons
'**/*.*': 'max-age=3600', // 1 hour for everything else
}
},
accessControlLevel: 'bucket-owner-full-control' // optional; omitted from the request when undefined. Available options: "private" | "public-read" | "public-read-write" | "authenticated-read" | "aws-exec-read" | "bucket-owner-read" | "bucket-owner-full-control"
}).upload();
```

Expand All @@ -88,8 +91,8 @@ in your repo. Use the following template for the config file as stated in the [A

```json
{
"accessKeyId": "<YOUR_ACCESS_KEY_ID>",
"secretAccessKey": "<YOUR_SECRET_ACCESS_KEY>",
"accessKeyId": "<YOUR_ACCESS_KEY_ID>",
"secretAccessKey": "<YOUR_SECRET_ACCESS_KEY>",
"region": "us-east-1"
}
```
Expand Down
70 changes: 43 additions & 27 deletions src/lib/Uploader.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,19 @@ export type Options = {
dryRun?: boolean;
cacheControl?: string | { [key: string]: string };
s3Client?: S3;
accessControlLevel?: ObjectACL;
};

/**
 * Canned ACL applied to each object uploaded to S3 (sent as the `ACL`
 * request parameter when `accessControlLevel` is configured).
 *
 * The known canned ACL names are spelled out as literals so editors can
 * autocomplete and typo-check them. `(string & {})` keeps the type open to
 * any other string AWS may accept in the future WITHOUT collapsing the
 * union: a bare `| string` would absorb every literal member and reduce
 * the whole type to plain `string`, losing autocomplete entirely.
 */
export type ObjectACL =
  | 'private'
  | 'public-read'
  | 'public-read-write'
  | 'authenticated-read'
  | 'aws-exec-read'
  | 'bucket-owner-read'
  | 'bucket-owner-full-control'
  | (string & {});

const defaultOptions = {
dryRun: false,
concurrency: 100,
Expand Down Expand Up @@ -90,38 +101,43 @@ export default class Uploader {
gatheringSpinner.start();

return new Promise((resolve, reject) => {
glob(
`**/${globPath}`,
{ cwd: path.resolve(localPath) },
(err, files) => {
if (err) {
gatheringSpinner.fail(err);
reject(err);
}

gatheringSpinner.succeed(
`Found ${chalk.green(files.length)} files at ${chalk.blue(
localPath
)}, starting upload:`,
);

resolve(files);
},
);
glob(`**/${globPath}`, { cwd: path.resolve(localPath) }, (err, files) => {
if (err) {
gatheringSpinner.fail(err);
reject(err);
}

gatheringSpinner.succeed(
`Found ${chalk.green(files.length)} files at ${chalk.blue(localPath)}, starting upload:`,
);

resolve(files);
});
});
}

public uploadFile(localFilePath: string, remotePath: string): Promise<void> {
const body = fs.createReadStream(localFilePath);
const { dryRun, bucket: Bucket } = this.options;

const params = {
Bucket,
Key: remotePath.replace(/\\/g, '/'),
Body: body,
ContentType: mime.getType(localFilePath),
CacheControl: this.getCacheControlValue(localFilePath),
};
const { dryRun, bucket: Bucket, accessControlLevel: ACL } = this.options;
let params;
if (ACL) {
params = {
ACL,
Bucket,
Key: remotePath.replace(/\\/g, '/'),
Body: body,
ContentType: mime.getType(localFilePath),
CacheControl: this.getCacheControlValue(localFilePath),
};
} else {
params = {
Bucket,
Key: remotePath.replace(/\\/g, '/'),
Body: body,
ContentType: mime.getType(localFilePath),
CacheControl: this.getCacheControlValue(localFilePath),
};
}

return new Promise(resolve => {
if (!dryRun) {
Expand Down
7 changes: 7 additions & 0 deletions src/lib/cli.ts
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,13 @@ yargs
type: 'string',
nargs: 1,
})
// S3 canned ACL to apply to each uploaded object (e.g. 'public-read').
// Defaults to undefined so the ACL parameter is omitted from the S3
// request entirely when the flag is not passed on the command line.
.option('acl', {
alias: 'access-control-level',
default: undefined,
describe: 'Sets the bucket access control level for uploaded files',
type: 'string',
nargs: 1,
})
.option('a', {
alias: 'cache-control',
default: '',
Expand Down
37 changes: 37 additions & 0 deletions test/Uploader.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,43 @@ describe('Uploader', () => {
(<any>s3.upload).restore();
});

// Verifies that a configured accessControlLevel is forwarded to S3
// as the ACL entry of the upload() params.
it('should upload with access control level options', async function() {
  this.timeout(10000);

  // Stub S3 client: upload() just fires its callback with no error.
  const fakeS3 = {
    upload(_, cb) {
      cb(null);
    },
  };
  spy(fakeS3, 'upload');

  uploader = new Uploader({
    localPath: 'test/files',
    remotePath: 'fake',
    bucket: 'fake',
    glob: '**/demo.png',
    s3Client: <any>fakeS3,
    accessControlLevel: 'bucket-owner-full-control',
  });

  await uploader.upload();

  // Pull the params of the most recent upload() call; Body (a stream)
  // is split off and checked separately from the plain-value params.
  const lastParams = (<any>fakeS3.upload).lastCall.args[0];
  const { Body, ...rest } = lastParams;

  expect(rest).to.deep.equal({
    ACL: 'bucket-owner-full-control',
    Bucket: 'fake',
    Key: 'fake/demo.png',
    ContentType: 'image/png',
    CacheControl: '',
  });

  (<any>expect(Body).to.be.a).ReadableStream;

  (<any>fakeS3.upload).restore();
});

it('should fix windows paths', async function() {
this.timeout(5000);

Expand Down

0 comments on commit 35a0c9f

Please sign in to comment.