Merge pull request noobaa#7971 from nimrod-becker/backport_to_5_14
Backport to 5.14
nimrod-becker authored Apr 15, 2024
2 parents 31f9164 + 51db4fb commit 42b98e3
Showing 8 changed files with 22 additions and 49 deletions.
8 changes: 4 additions & 4 deletions package-lock.json

Some generated files are not rendered by default, so the package-lock.json diff is not shown.

2 changes: 1 addition & 1 deletion package.json
@@ -94,7 +94,7 @@
"heapdump": "0.3.15",
"http-proxy-agent": "7.0.0",
"https-proxy-agent": "7.0.2",
- "ip": "1.1.8",
+ "ip": "1.1.9",
"jsonwebtoken": "9.0.2",
"linux-blockutils": "0.2.0",
"lodash": "4.17.21",
4 changes: 2 additions & 2 deletions src/agent/block_store_services/block_store_client.js
@@ -23,7 +23,7 @@ const block_store_info_cache = new LRUCache({
max_usage: 1000,
expiry_ms: 2 * 60 * 1000,
make_key: params => params.options.address,
- load: async ({ rpc_client, options }) => rpc_client.block_store.get_block_store_info(null, options),
+ load: async ({ rpc_client, options }) => rpc_client.block_store.get_block_store_info({}, options),
});
class BlockStoreClient {

@@ -98,8 +98,8 @@ class BlockStoreClient {
}
},
});
dbg.log3('writing block id to gcp: ', block_id);
await buffer_utils.write_to_stream(write_stream, data);
write_stream.end();
const data_length = data.length;
const usage = data_length ? {
size: (block_md.is_preallocated ? 0 : data_length) + encoded_md.length,
1 change: 0 additions & 1 deletion src/agent/block_store_services/block_store_google.js
@@ -165,7 +165,6 @@ class BlockStoreGoogle extends BlockStoreBase {
dbg.log3('writing block id to cloud: ', key);
try {
await buffer_utils.write_to_stream(write_stream, data);
- write_stream.end();
const usage = {
size: data.length + encoded_md.length,
count: 1
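For context, a minimal sketch of the calling pattern this hunk moves to, assuming the updated buffer_utils.write_to_stream shown at the end of this diff (it now ends the stream and resolves on 'finish'); the file stream and path below are illustrative stand-ins, not code from this file:

    // Assumes this runs inside the noobaa-core tree so the require resolves
    // (path as seen from src/agent/block_store_services).
    const fs = require('fs');
    const buffer_utils = require('../../util/buffer_utils');

    async function write_block_sketch(data) {
        const write_stream = fs.createWriteStream('/tmp/block_example.bin'); // stand-in for the cloud write stream
        // The updated helper ends the stream and resolves on 'finish',
        // so no explicit write_stream.end() call is needed here anymore.
        await buffer_utils.write_to_stream(write_stream, data);
    }

    write_block_sketch(Buffer.from('example')).catch(console.error);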
28 changes: 6 additions & 22 deletions src/server/object_services/object_server.js
@@ -451,17 +451,11 @@ async function complete_object_upload(req) {
const upload_duration = time_utils.format_time_duration(took_ms);
const upload_size = size_utils.human_size(set_updates.size);
const upload_speed = size_utils.human_size(set_updates.size / took_ms * 1000);
- Dispatcher.instance().activity({
- system: req.system._id,
- level: 'info',
- event: 'obj.uploaded',
- obj: obj._id,
- actor: req.account && req.account._id,
- desc: `${obj.key} was uploaded by ${req.account && req.account.email.unwrap()} into bucket ${req.bucket.name.unwrap()}.` +
- `\nUpload size: ${upload_size}.` +
- `\nUpload duration: ${upload_duration}.` +
- `\nUpload speed: ${upload_speed}/sec.`,
- });
+ dbg.log1(`${obj.key} was uploaded by ${req.account && req.account.email.unwrap()} into bucket ${req.bucket.name.unwrap()}.` +
+ `\nUpload size: ${upload_size}.` +
+ `\nUpload duration: ${upload_duration}.` +
+ `\nUpload speed: ${upload_speed}/sec.`,
+ );
return {
etag: get_etag(obj, set_updates),
version_id: MDStore.instance().get_object_version_id(set_updates),
@@ -884,19 +878,9 @@ async function delete_object(req) {
const { reply, obj } = req.rpc_params.version_id ?
await _delete_object_version(req) :
await _delete_object_only_key(req);

if (obj) {
- Dispatcher.instance().activity({
- system: req.system._id,
- level: 'info',
- event: 'obj.deleted',
- obj: obj._id,
- actor: req.account && req.account._id,
- desc: `${obj.key} was deleted by ${req.account && req.account.email.unwrap()}`,
- });

+ dbg.log1(`${obj.key} was deleted by ${req.account && req.account.email.unwrap()}`);
}

return reply;
}

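These two hunks replace Dispatcher activity records ('obj.uploaded' / 'obj.deleted') with plain debug logging. A minimal sketch of the resulting pattern, assuming the repo's debug_module; the wrapper function and its arguments are illustrative, not code from this file:

    // Assumes this runs inside the noobaa-core tree (path as seen from src/server/object_services).
    const dbg = require('../../util/debug_module')(__filename);

    // Upload summaries are now emitted at debug level 1 instead of being
    // written to the activity log through Dispatcher.instance().activity(...).
    function log_upload_summary(key, email, bucket, upload_size, upload_duration, upload_speed) {
        dbg.log1(`${key} was uploaded by ${email} into bucket ${bucket}.` +
            `\nUpload size: ${upload_size}.` +
            `\nUpload duration: ${upload_duration}.` +
            `\nUpload speed: ${upload_speed}/sec.`);
    }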
21 changes: 5 additions & 16 deletions src/test/unit_tests/test_lifecycle.js
@@ -11,7 +11,6 @@ const mongodb = require('mongodb');
const { v4: uuid } = require('uuid');

const P = require('../../util/promise');
- const config = require('../../../config');
const MDStore = require('../../server/object_services/md_store').MDStore;
const coretest = require('./coretest');
const lifecycle = require('../../server/bg_services/lifecycle');
@@ -127,22 +126,12 @@ mocha.describe('lifecycle', () => {

async function verify_object_deleted(key) {
await P.delay(100); // 0.1sec
await rpc_client.system.read_system();
- /* read_activity_log fails w/postgres
- see https://github.com/noobaa/noobaa-core/runs/5750698669
- */
- if (config.DB_TYPE === 'mongodb') {
- const eventLogs = await rpc_client.events.read_activity_log({limit: 32});
- console.log('read_activity_log logs: ', util.inspect(eventLogs));
- const found = eventLogs.logs.find(e => (e.event === 'obj.deleted') && (e.obj.key === key));
- console.log('read_activity_log found log: ', found);
- assert(found && found.obj.key === key, `find deleted actual ${util.inspect(found)} expected ${key}`);
- }
const listObjectResult = await rpc_client.object.list_objects_admin({ bucket: Bucket, prefix: key });
console.log('list_objects_admin objects: ', util.inspect(listObjectResult.objects));
const actualLength = listObjectResult.objects.length;
assert.strictEqual(actualLength, 0, `listObjectResult actual ${actualLength} !== expected 0`);
}

mocha.it('test prefix, absolute date expiration', async () => {
const key = uuid();
const prefix = key.split('-')[0];
@@ -161,11 +150,11 @@
const prefix = key.split('-')[0];
const age = 17;
const bucket = Bucket;
- const tagging = [ {key: 'tagname1', value: 'tagvalue1'}, {key: 'tagname2', value: 'tagvalue2'}, {key: 'tagname3', value: 'tagvalue3'}];
+ const tagging = [{ key: 'tagname1', value: 'tagvalue1' }, { key: 'tagname2', value: 'tagvalue2' }, { key: 'tagname3', value: 'tagvalue3' }];

await create_mock_object(key, bucket, age, undefined, tagging);
// match by tags subset, out of order
- const filter_tagging = [ {key: 'tagname3', value: 'tagvalue3'}, {key: 'tagname2', value: 'tagvalue2'} ];
+ const filter_tagging = [{ key: 'tagname3', value: 'tagvalue3' }, { key: 'tagname2', value: 'tagvalue2' }];
const putLifecycleParams = commonTests.date_lifecycle_configuration_and_tags(bucket, prefix, filter_tagging);
await s3.putBucketLifecycleConfiguration(putLifecycleParams).promise();
await lifecycle.background_worker();
@@ -214,8 +203,8 @@
const key = uuid();
const object_age = 2;
const days = 1;
- const tag = { key: 'tagname', value: 'tagvalue'};
- const tagging = [ tag ];
+ const tag = { key: 'tagname', value: 'tagvalue' };
+ const tagging = [tag];
const bucket = Bucket;

await create_mock_object(key, bucket, object_age, undefined, tagging);
4 changes: 2 additions & 2 deletions (file path not shown in this view)
@@ -4,6 +4,7 @@
const { inspect } = require('util');
const { Storage } = require('@google-cloud/storage');
const dbg = require('../../../util/debug_module')(__filename);
+ const buffer_utils = require('../../../util/buffer_utils');
dbg.set_process_name('analyze_resource');
const CloudVendor = require('./analyze_resource_cloud_vendor_abstract');

@@ -58,8 +59,7 @@ class AnalyzeGcp extends CloudVendor {
.bucket(bucket)
.file(key)
.createWriteStream();
- stream.write(''); //write an empty file
- stream.end();
+ await buffer_utils.write_to_stream(stream, ''); //write an empty file
stream.on('response', resp => {
dbg.log0(`Write of ${key} response: ${inspect(resp)}`);
});
3 changes: 2 additions & 1 deletion src/util/buffer_utils.js
@@ -165,11 +165,12 @@ function count_length(buffers) {
function write_to_stream(writable, buf) {
return new Promise((resolve, reject) => {
writable.once('error', reject);
+ writable.once('finish', resolve);
writable.write(buf, err => {
if (err) {
return reject(err);
}
- return resolve();
+ writable.end();
});
});
}
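To make the behavioral change concrete: the old write_to_stream resolved as soon as write() accepted the buffer and did not end the stream itself (callers did), while the new version ends the stream and resolves only on 'finish'. A self-contained sketch in plain Node, no repo imports; the helper body mirrors the updated code above, and the demo sink is made up:

    const { Writable } = require('stream');

    function write_to_stream(writable, buf) {
        return new Promise((resolve, reject) => {
            writable.once('error', reject);
            writable.once('finish', resolve);
            writable.write(buf, err => {
                if (err) {
                    return reject(err);
                }
                writable.end();
            });
        });
    }

    async function demo() {
        const sink = new Writable({
            write(chunk, encoding, callback) { setTimeout(callback, 50); }, // simulate a slow upload
            final(callback) { console.log('flushed'); callback(); },
        });
        await write_to_stream(sink, Buffer.from('hello'));
        console.log('resolved after finish'); // always prints after 'flushed'
    }

    demo().catch(console.error);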
