Improvement/cldsrv 551 auth post object #5617

Draft
wants to merge 15 commits into base: feature/CLDSRV-546-post-object
158 changes: 121 additions & 37 deletions lib/api/api.js
@@ -1,5 +1,6 @@
const { auth, errors, policies } = require('arsenal');
const async = require('async');
const busboy = require('@fastify/busboy');

const bucketDelete = require('./bucketDelete');
const bucketDeleteCors = require('./bucketDeleteCors');
@@ -52,6 +53,7 @@ const objectGetRetention = require('./objectGetRetention');
const objectGetTagging = require('./objectGetTagging');
const objectHead = require('./objectHead');
const objectPut = require('./objectPut');
const objectPost = require('./objectPost');
const objectPutACL = require('./objectPutACL');
const objectPutLegalHold = require('./objectPutLegalHold');
const objectPutTagging = require('./objectPutTagging');
@@ -112,7 +114,7 @@ const api = {

// no need to check auth on website or cors preflight requests
if (apiMethod === 'websiteGet' || apiMethod === 'websiteHead' ||
apiMethod === 'corsPreflight') {
apiMethod === 'corsPreflight') {
request.actionImplicitDenies = false;
return this[apiMethod](request, log, callback);
}
@@ -158,7 +160,7 @@ const api = {
// second item checks s3:GetObject(Version)Tagging action
if (!authResults[1].isAllowed) {
log.trace('get tagging authorization denial ' +
'from Vault');
'from Vault');
returnTagCount = false;
}
} else {
@@ -184,8 +186,114 @@
}
return { returnTagCount, isImplicitDeny };
}
let bb;
let fileEventData = null;

if (apiMethod === 'objectPost') {
bb = busboy({ headers: request.headers });
}

return async.waterfall([
next => {
if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
return next(null);
}
if (apiMethod === 'objectPost' && request.headers['content-type'].includes('multipart/form-data')) {
writeContinue(request, response);

let algoOK = false;
let credOK = false;
let dateOK = false;
let sigOK = false;
let policyOK = false;
request.formData = {};
let totalFieldSize = 0;
const MAX_FIELD_SIZE = 20 * 1024; // 20KB
bb.on('field', (fieldname, val) => {
totalFieldSize += Buffer.byteLength(val, 'utf8');
if (totalFieldSize > MAX_FIELD_SIZE) {
const err = errors.MaxPostPreDataLengthExceeded;
bb.emit('error', err);
}
// Convert fieldname to lowercase for case-insensitive comparison
const lowerFieldname = fieldname.toLowerCase();
request.formData[lowerFieldname] = val;
if (lowerFieldname === 'x-amz-algorithm') {
algoOK = true;
}
if (lowerFieldname === 'x-amz-credential') {
credOK = true;
}
if (lowerFieldname === 'x-amz-date') {
dateOK = true;
}
if (lowerFieldname === 'x-amz-signature') {
sigOK = true;
}
if (lowerFieldname === 'policy') {
policyOK = true;
}
});

bb.on('file', (fieldname, file, filename, encoding, mimetype) => {
fileEventData = { fieldname, file, filename, encoding, mimetype };
if (algoOK && credOK && dateOK && sigOK && policyOK) {
return next(null);
}
return undefined;
});

bb.on('finish', () => {
// if fields are found but no file, continue
if ((algoOK && credOK && dateOK && sigOK && policyOK) && !fileEventData) {
return next(null);
}
return undefined;
});

bb.on('error', (err) => {
log.trace('Error processing form data:', {
error: err,
});
request.unpipe(bb);
return next(err);
});

request.pipe(bb);
} else {
// issue 100 Continue to the client
writeContinue(request, response);
const MAX_POST_LENGTH = request.method === 'POST' ?
1024 * 1024 : 1024 * 1024 / 2; // 1 MB or 512 KB
const post = [];
let postLength = 0;
request.on('data', chunk => {
postLength += chunk.length;
// Sanity check on post length
if (postLength <= MAX_POST_LENGTH) {
post.push(chunk);
}
});

request.on('error', err => {
log.trace('error receiving request', {
error: err,
});
return next(errors.InternalError);
});

request.on('end', () => {
if (postLength > MAX_POST_LENGTH) {
log.error('body length is too long for request type',
{ postLength });
return next(errors.InvalidRequest);
}
request.post = Buffer.concat(post, postLength).toString();
return next(null);
});
}
return undefined;
},
next => auth.server.doAuth(
request, log, (err, userInfo, authorizationResults, streamingV4Params) => {
if (err) {
@@ -200,41 +308,7 @@ const api = {
authNames.userName = userInfo.getIAMdisplayName();
}
log.addDefaultFields(authNames);
if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
return next(null, userInfo, authorizationResults, streamingV4Params);
}
// issue 100 Continue to the client
writeContinue(request, response);
const MAX_POST_LENGTH = request.method === 'POST' ?
1024 * 1024 : 1024 * 1024 / 2; // 1 MB or 512 KB
const post = [];
let postLength = 0;
request.on('data', chunk => {
postLength += chunk.length;
// Sanity check on post length
if (postLength <= MAX_POST_LENGTH) {
post.push(chunk);
}
});

request.on('error', err => {
log.trace('error receiving request', {
error: err,
});
return next(errors.InternalError);
});

request.on('end', () => {
if (postLength > MAX_POST_LENGTH) {
log.error('body length is too long for request type',
{ postLength });
return next(errors.InvalidRequest);
}
// Convert array of post buffers into one string
request.post = Buffer.concat(post, postLength).toString();
return next(null, userInfo, authorizationResults, streamingV4Params);
});
return undefined;
return next(null, userInfo, authorizationResults, streamingV4Params);
},
// Tag condition keys require information from CloudServer for evaluation
(userInfo, authorizationResults, streamingV4Params, next) => tagConditionKeyAuth(
@@ -271,6 +345,15 @@
return acc;
}, {});
}
if (apiMethod === 'objectPost') {
request._response = response;
if (fileEventData) {
request.fileEventData = fileEventData;
request.headers['content-type'] = fileEventData.mimetype;
}
return this[apiMethod](userInfo, request, streamingV4Params,
log, callback, authorizationResults);
}
if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
request._response = response;
return this[apiMethod](userInfo, request, streamingV4Params,
@@ -337,6 +420,7 @@
objectCopy,
objectHead,
objectPut,
objectPost,
objectPutACL,
objectPutLegalHold,
objectPutTagging,
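For context, below is a minimal client-side sketch of a POST Object upload whose form fields satisfy the checks in the busboy 'field' handler above. The endpoint, bucket, credential, policy, and signature values are placeholders, and the Node 18+ globals fetch, FormData, and Blob are assumed; this sketch is not part of the diff.

// Sketch only: build a multipart/form-data POST whose fields match what the
// 'field' handler above looks for. All values below are placeholders.
async function postObjectExample() {
    const form = new FormData();
    form.append('key', 'uploads/example.txt');
    form.append('x-amz-algorithm', 'AWS4-HMAC-SHA256');
    form.append('x-amz-credential', '<accessKey>/<date>/<region>/s3/aws4_request');
    form.append('x-amz-date', '20240101T000000Z');
    form.append('policy', '<base64-encoded policy document>');
    form.append('x-amz-signature', '<hex signature of the policy>');
    // The file part comes last: the handler only proceeds from the 'file'
    // event once all five signing fields above have been seen.
    form.append('file', new Blob(['hello world']), 'example.txt');
    const res = await fetch('https://<endpoint>/<bucket>', { method: 'POST', body: form });
    return res.status;
}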
14 changes: 12 additions & 2 deletions lib/api/apiUtils/object/createAndStoreObject.js
@@ -210,8 +210,18 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
metadataStoreParams.contentMD5 = constants.emptyFileMd5;
return next(null, null, null);
}
return dataStore(objectKeyContext, cipherBundle, request, size,
streamingV4Params, backendInfo, log, next);
// Object Post receives a file stream.
// This is to be used to store data instead of the request stream itself.

let stream;

if (request.apiMethod === 'objectPost') {
stream = request.fileEventData ? request.fileEventData.file : undefined;
} else {
stream = request;
}

return dataStore(objectKeyContext, cipherBundle, stream, size, streamingV4Params, backendInfo, log, next);
},
function processDataResult(dataGetInfo, calculatedHash, next) {
if (dataGetInfo === null || dataGetInfo === undefined) {
2 changes: 1 addition & 1 deletion lib/api/apiUtils/object/prepareStream.js
@@ -13,7 +13,7 @@ const V4Transform = require('../../../auth/streamingV4/V4Transform');
* the type of request requires them
*/
function prepareStream(stream, streamingV4Params, log, errCb) {
if (stream.headers['x-amz-content-sha256'] ===
if (stream && stream.headers && stream.headers['x-amz-content-sha256'] ===
'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
if (typeof streamingV4Params !== 'object') {
// this might happen if the user provided a valid V2
135 changes: 135 additions & 0 deletions lib/api/objectPost.js
@@ -0,0 +1,135 @@
const async = require('async');
const { errors, versioning } = require('arsenal');
const { PassThrough } = require('stream');

const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const createAndStoreObject = require('./apiUtils/object/createAndStoreObject');
const { standardMetadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { config } = require('../Config');
const { setExpirationHeaders } = require('./apiUtils/object/expirationHeaders');
const writeContinue = require('../utilities/writeContinue');
const { overheadField } = require('../../constants');


const versionIdUtils = versioning.VersionID;


/**
* POST Object in the requested bucket. Steps include:
* validating metadata for authorization, bucket and object existence etc.
* store object data in datastore upon successful authorization
* store object location returned by datastore and
* object's (custom) headers in metadata
* return the result in final callback
*
* @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
* @param {request} request - request object given by router,
* includes normalized headers
* @param {object | undefined } streamingV4Params - if v4 auth,
* object containing accessKey, signatureFromRequest, region, scopeDate,
* timestamp, and credentialScope
* (to be used for streaming v4 auth if applicable)
* @param {object} log - the log request
* @param {Function} callback - final callback to call with the result
* @return {undefined}
*/
function objectPost(authInfo, request, streamingV4Params, log, callback) {
const {
headers,
method,
} = request;
let parsedContentLength = 0;
const passThroughStream = new PassThrough();
const requestType = request.apiMethods || 'objectPost';
const valParams = {
authInfo,
bucketName: request.formData.bucket,
objectKey: request.formData.key,
requestType,
request,
};
const canonicalID = authInfo.getCanonicalID();


log.trace('owner canonicalID to send to data', { canonicalID });
return standardMetadataValidateBucketAndObj(valParams, request.actionImplicitDenies, log,
(err, bucket, objMD) => {
const responseHeaders = collectCorsHeaders(headers.origin,
method, bucket);

if (err && !err.AccessDenied) {
log.trace('error processing request', {
error: err,
method: 'metadataValidateBucketAndObj',
});
return callback(err, responseHeaders);
}
if (bucket.hasDeletedFlag() && canonicalID !== bucket.getOwner()) {
log.trace('deleted flag on bucket and request ' +
'from non-owner account');
return callback(errors.NoSuchBucket);
}

return async.waterfall([
function countPOSTFileSize(next) {
if (!request.fileEventData || !request.fileEventData.file) {
return next();
}
request.fileEventData.file.on('data', (chunk) => {
parsedContentLength += chunk.length;
passThroughStream.write(chunk);
});

request.fileEventData.file.on('end', () => {
passThroughStream.end();
// Setting the file in the request avoids the need to change createAndStoreObject's
// parameters and thus all of its subsequent calls. This is necessary because the
// stream used to create the object is normally the request itself; we work around
// that here to use the file data produced from the multipart form data.
/* eslint-disable no-param-reassign */
request.fileEventData.file = passThroughStream;
/* eslint-disable no-param-reassign */
// Here parsedContentLength will have the total size of the file
// This is used when calculating the size of the object in createAndStoreObject
request.parsedContentLength = parsedContentLength;
return next();
});
return undefined;
},
function objectCreateAndStore(next) {
writeContinue(request, request._response);
return createAndStoreObject(request.bucketName,
bucket, request.formData.key, objMD, authInfo, canonicalID, null,
request, false, streamingV4Params, overheadField, log, next);
},
], (err, storingResult) => {
if (err) {
return callback(err, responseHeaders);
}
setExpirationHeaders(responseHeaders, {
lifecycleConfig: bucket.getLifecycleConfiguration(),
objectParams: {
key: request.key,
date: storingResult.lastModified,
tags: storingResult.tags,
},
});
if (storingResult) {
// ETag's hex should always be enclosed in quotes
responseHeaders.ETag = `"${storingResult.contentMD5}"`;
}
const vcfg = bucket.getVersioningConfiguration();
const isVersionedObj = vcfg && vcfg.Status === 'Enabled';
if (isVersionedObj) {
if (storingResult && storingResult.versionId) {
responseHeaders['x-amz-version-id'] =
versionIdUtils.encode(storingResult.versionId,
config.versionIdEncodingType);
}
}
return callback(null, responseHeaders);
});
});
}

module.exports = objectPost;
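As a side note, the countPOSTFileSize step above uses a common Node.js pattern: forward chunks through a PassThrough while summing their lengths, so the total size is known before the data is handed to the data store. A standalone sketch of that pattern follows; the function and parameter names are illustrative and not taken from this PR.

const { PassThrough } = require('stream');

// Forward a readable stream unchanged while counting its bytes, then report
// the total once the source ends (mirrors the countPOSTFileSize step above).
function countAndForward(source, onDone) {
    const forwarded = new PassThrough();
    let total = 0;
    source.on('data', chunk => {
        total += chunk.length;
        forwarded.write(chunk);
    });
    source.on('end', () => {
        forwarded.end();
        onDone(total, forwarded);
    });
    source.on('error', err => forwarded.destroy(err));
    return forwarded;
}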