diff --git a/.github/workflows/backend-build.yml b/.github/workflows/backend-build.yml index d0448a7b..1bcb86c7 100644 --- a/.github/workflows/backend-build.yml +++ b/.github/workflows/backend-build.yml @@ -35,6 +35,10 @@ jobs: working-directory: ./backend run: yarn install + - name: Run tests + working-directory: ./backend + run: yarn test + - name: Run lint working-directory: ./backend run: yarn lint diff --git a/backend/.yarn/install-state.gz b/backend/.yarn/install-state.gz index 35ff3a04..76971f4c 100644 Binary files a/backend/.yarn/install-state.gz and b/backend/.yarn/install-state.gz differ diff --git a/backend/__tests__/e2e/objects/nodes.spec.ts b/backend/__tests__/e2e/objects/nodes.spec.ts new file mode 100644 index 00000000..207a994c --- /dev/null +++ b/backend/__tests__/e2e/objects/nodes.spec.ts @@ -0,0 +1,131 @@ +import { v4 } from 'uuid' +import { NodesUseCases } from '../../../src/useCases/index.js' +import { + blake3HashFromCid, + cidOfNode, + cidToString, + createFileChunkIpldNode, + createNode, + createSingleFileIpldNode, + encodeNode, + MetadataType, +} from '@autonomys/auto-dag-data' +import { dbMigration } from '../../utils/dbMigrate.js' +import { nodesRepository } from '../../../src/repositories/index.js' +import { ObjectMappingListEntry } from '../../../src/models/objects/objectMappings.js' + +describe('Nodes', () => { + const id = v4() + + const expectNode = { + cid: id, + headCid: id, + rootCid: id, + type: MetadataType.File, + encodedNode: '', + } + + beforeAll(async () => { + await dbMigration.up() + }) + afterAll(async () => { + await dbMigration.down() + }) + + it('should be able to save node', async () => { + await expect( + NodesUseCases.saveNode( + expectNode.cid, + expectNode.headCid, + expectNode.rootCid, + expectNode.type, + expectNode.encodedNode, + ), + ).resolves.not.toThrow() + }) + + it('should be able to get node', async () => { + const node = await nodesRepository.getNode(id) + expect(node).toMatchObject({ + cid: expectNode.cid, + head_cid: expectNode.headCid, + root_cid: expectNode.rootCid, + type: expectNode.type, + encoded_node: expectNode.encodedNode, + }) + }) + + it('should be able to get chunk data', async () => { + const buffer = Buffer.from('test') + await NodesUseCases.saveNode( + id, + id, + id, + MetadataType.File, + Buffer.from(encodeNode(createFileChunkIpldNode(buffer))).toString( + 'base64', + ), + ) + const data = await NodesUseCases.getChunkData(id) + expect(data).toEqual(buffer) + }) + + it('should be able to get node with wrong cid returns undefined', async () => { + const data = await NodesUseCases.getNode(v4()) + expect(data).toBeUndefined() + }) + + it('should be able to get blockstore', async () => { + const node = createNode(Buffer.from('test'), []) + const randomCID = cidOfNode(node) + await NodesUseCases.saveNode( + randomCID, + randomCID, + randomCID, + MetadataType.File, + Buffer.from( + encodeNode(createFileChunkIpldNode(Buffer.from(encodeNode(node)))), + ).toString('base64'), + ) + + const blockstore = await nodesRepository.getNode(cidToString(randomCID)) + + expect(blockstore).toMatchObject({ + cid: cidToString(randomCID), + head_cid: cidToString(randomCID), + root_cid: cidToString(randomCID), + type: MetadataType.File, + encoded_node: expect.any(String), + }) + }) + + it('should be able to save nodes', async () => { + const nodes = Array.from({ length: 10 }, (_, index) => + createSingleFileIpldNode(Buffer.from(`test-${index}`), `test-${index}`), + ) + await NodesUseCases.saveNodes(id, id, nodes) + }) + + it('process 
node archived', async () => { + const node = createSingleFileIpldNode(Buffer.from('test'), 'test') + const cid = cidOfNode(node) + const hash = Buffer.from(blake3HashFromCid(cid)).toString('hex') + const objectMappings: ObjectMappingListEntry = { + blockNumber: 1, + v0: { + objects: [[hash, 1, 1]], + }, + } + + await NodesUseCases.saveNodes(cid, cid, [node]) + + await NodesUseCases.processNodeArchived(objectMappings) + + const savedNode = await nodesRepository.getNode(cidToString(cid)) + expect(savedNode).toMatchObject({ + cid: cidToString(cid), + piece_index: 1, + piece_offset: 1, + }) + }) +}) diff --git a/backend/__tests__/e2e/objects/object.spec.ts b/backend/__tests__/e2e/objects/object.spec.ts new file mode 100644 index 00000000..2547991d --- /dev/null +++ b/backend/__tests__/e2e/objects/object.spec.ts @@ -0,0 +1,218 @@ +import { User } from '../../../src/models/users' +import { ObjectUseCases, UsersUseCases } from '../../../src/useCases' +import { dbMigration } from '../../utils/dbMigrate' +import { PreconditionError } from '../../utils/error' +import { createMockUser, MOCK_UNONBOARDED_USER } from '../../utils/mocks' +import { uploadFile } from '../../utils/uploads' + +describe('Object', () => { + let user: User + let fileCid: string + + beforeAll(async () => { + await dbMigration.up() + const result = await UsersUseCases.onboardUser(MOCK_UNONBOARDED_USER) + if (!result) { + throw new PreconditionError('Failed to onboard user') + } + user = result + fileCid = await uploadFile(user, 'test.txt', 'test', 'text/plain') + }) + + afterAll(async () => { + await dbMigration.down() + }) + + it('should get object summary by cid', async () => { + const summary = await ObjectUseCases.getObjectSummaryByCID(fileCid) + expect(summary).toBeDefined() + }) + + it('should not get listed in deleted objects', async () => { + const summary = await ObjectUseCases.getMarkedAsDeletedRoots(user) + expect(summary.rows.length).toBe(0) + }) + + it('should get listed in user objects', async () => { + const summary = await ObjectUseCases.getRootObjects( + { + scope: 'user', + user, + }, + 1, + 0, + ) + expect(summary.rows).toMatchObject([ + { + headCid: fileCid, + }, + ]) + }) + + it('should get listed in global objects', async () => { + const summary = await ObjectUseCases.getRootObjects( + { + scope: 'global', + }, + 1, + 0, + ) + expect(summary.rows).toMatchObject([ + { + headCid: fileCid, + }, + ]) + }) + + it('should be able to mark as deleted and get listed in deleted objects', async () => { + await expect( + ObjectUseCases.markAsDeleted(user, fileCid), + ).resolves.not.toThrow() + + const deletedSummary = await ObjectUseCases.getMarkedAsDeletedRoots(user) + expect(deletedSummary.rows).toMatchObject([ + { + headCid: fileCid, + }, + ]) + }) + + it('should be able to restore object', async () => { + await expect( + ObjectUseCases.restoreObject(user, fileCid), + ).resolves.not.toThrow() + + const deletedSummary = await ObjectUseCases.getMarkedAsDeletedRoots(user) + expect(deletedSummary.rows).toMatchObject([]) + + const summary = await ObjectUseCases.getRootObjects( + { + scope: 'user', + user, + }, + 1, + 0, + ) + + expect(summary.rows).toMatchObject([ + { + headCid: fileCid, + }, + ]) + + const globalSummary = await ObjectUseCases.getRootObjects( + { + scope: 'global', + }, + 1, + 0, + ) + + expect(globalSummary.rows).toMatchObject([ + { + headCid: fileCid, + }, + ]) + }) + + it('should be able to search object by name', async () => { + const metadata = await ObjectUseCases.getMetadata(fileCid) + if 
(!metadata) throw new PreconditionError('Metadata not found') + + const search = await ObjectUseCases.searchMetadataByName( + metadata.name!, + 5, + { + scope: 'user', + user, + }, + ) + expect(search).toMatchObject([{ metadata }]) + }) + + it('should be able to search object by cid', async () => { + const metadata = await ObjectUseCases.getMetadata(fileCid) + if (!metadata) throw new PreconditionError('Metadata not found') + + const search = await ObjectUseCases.searchMetadataByCID(fileCid, 5, { + scope: 'user', + user, + }) + expect(search).toMatchObject([{ metadata }]) + }) + + it('should be able to search object by name (using common method)', async () => { + const metadata = await ObjectUseCases.getMetadata(fileCid) + if (!metadata) throw new PreconditionError('Metadata not found') + + const search = await ObjectUseCases.searchByCIDOrName(metadata.name!, 5, { + scope: 'user', + user, + }) + expect(search).toMatchObject([ + { + cid: fileCid, + name: metadata.name, + }, + ]) + }) + + it('should be able to search object by cid (using common method)', async () => { + const metadata = await ObjectUseCases.getMetadata(fileCid) + if (!metadata) throw new PreconditionError('Metadata not found') + + const search = await ObjectUseCases.searchByCIDOrName(fileCid, 5, { + scope: 'user', + user, + }) + expect(search).toMatchObject([ + { + cid: fileCid, + name: metadata.name, + }, + ]) + }) + + describe('Share object', () => { + let randomFile: string + it('should be able to share object', async () => { + const mockUser = await createMockUser() + randomFile = await uploadFile(mockUser, 'test.txt', 'test', 'text/plain') + + await expect( + ObjectUseCases.shareObject(mockUser, randomFile, user.publicId!), + ).resolves.not.toThrow() + + const sharedRoots = await ObjectUseCases.getSharedRoots(user) + expect(sharedRoots.rows).toMatchObject([ + { + headCid: randomFile, + }, + ]) + }) + + it('should be able to delete shared object', async () => { + await expect( + ObjectUseCases.markAsDeleted(user, randomFile), + ).resolves.not.toThrow() + }) + + it('should not be listed in shared objects', async () => { + const sharedRoots = await ObjectUseCases.getSharedRoots(user) + expect(sharedRoots.rows).toMatchObject([]) + }) + + it('should be able to restore shared object', async () => { + await expect( + ObjectUseCases.restoreObject(user, randomFile), + ).resolves.not.toThrow() + + const sharedRoots = await ObjectUseCases.getSharedRoots(user) + expect(sharedRoots.rows).toMatchObject([ + { + headCid: randomFile, + }, + ]) + }) + }) +}) diff --git a/backend/__tests__/e2e/objects/transactionResults.spec.ts b/backend/__tests__/e2e/objects/transactionResults.spec.ts new file mode 100644 index 00000000..511ece93 --- /dev/null +++ b/backend/__tests__/e2e/objects/transactionResults.spec.ts @@ -0,0 +1,34 @@ +import { v4 } from 'uuid' +import { TransactionResult } from '../../../src/models/objects' +import { TransactionResultsUseCases } from '../../../src/useCases' +import { dbMigration } from '../../utils/dbMigrate' + +describe('Transaction Results', () => { + beforeAll(async () => { + await dbMigration.up() + }) + + afterAll(async () => { + await dbMigration.down() + }) + + it('should be able to save transaction results', async () => { + const transactionResult: TransactionResult = { + success: true, + batchTxHash: '0x123', + status: 'success', + } + + const cid = v4() + + await TransactionResultsUseCases.setTransactionResults( + cid, + transactionResult, + ) + + const savedTransactionResult = + await 
TransactionResultsUseCases.getNodeTransactionResult(cid) + + expect(savedTransactionResult).toEqual(transactionResult) + }) +}) diff --git a/backend/__tests__/e2e/uploads/files.spec.ts b/backend/__tests__/e2e/uploads/files.spec.ts new file mode 100644 index 00000000..d5213451 --- /dev/null +++ b/backend/__tests__/e2e/uploads/files.spec.ts @@ -0,0 +1,318 @@ +import { User } from '../../../src/models/users/index.js' +import { UsersUseCases } from '../../../src/useCases/users/users.js' +import { dbMigration } from '../../utils/dbMigrate.js' +import { PreconditionError } from '../../utils/error.js' +import { MOCK_UNONBOARDED_USER } from '../../utils/mocks.js' +import { UploadsUseCases } from '../../../src/useCases/uploads/uploads.js' +import { + Upload, + UploadStatus, + UploadType, +} from '../../../src/models/uploads/upload.js' +import { blockstoreRepository } from '../../../src/repositories/uploads/index.js' +import { MemoryBlockstore } from 'blockstore-core' +import { + processFileToIPLDFormat, + MetadataType, + DEFAULT_MAX_CHUNK_SIZE, + cidToString, +} from '@autonomys/auto-dag-data' +import { ObjectUseCases } from '../../../src/useCases/objects/object.js' +import { uploadsRepository } from '../../../src/repositories/uploads/uploads.js' +import { asyncIterableToPromiseOfArray } from '../../../src/utils/async.js' +import { + FilesUseCases, + SubscriptionsUseCases, + TransactionResultsUseCases, +} from '../../../src/useCases/index.js' +import { + interactionsRepository, + nodesRepository, +} from '../../../src/repositories/index.js' +import { InteractionType } from '../../../src/models/objects/interactions.js' +import { databaseDownloadCache } from '../../../src/services/download/databaseDownloadCache/index.js' +import { memoryDownloadCache } from '../../../src/services/download/memoryDownloadCache/index.js' +import { + OwnerRole, + TransactionStatus, +} from '../../../src/models/objects/index.js' + +const files = [ + { + filename: 'test.pdf', + mimeType: 'application/pdf', + rndBuffer: Buffer.alloc(1024 ** 2).fill(0), + }, + { + filename: 'test.txt', + mimeType: null, + rndBuffer: Buffer.alloc(1024).fill(0), + }, +] + +files.map((file, index) => { + describe(`File Upload #${index + 1}`, () => { + let user: User + + beforeAll(async () => { + await dbMigration.up() + const result = await UsersUseCases.onboardUser(MOCK_UNONBOARDED_USER) + if (!result) throw new PreconditionError('Failed to setup test user') + user = result + }) + + afterAll(async () => { + await dbMigration.down() + }) + + const { filename, mimeType, rndBuffer } = file + const fileSize = rndBuffer.length + const TOTAL_CHUNKS = Math.max( + 1, + Math.ceil(fileSize / DEFAULT_MAX_CHUNK_SIZE), + ) + + let upload: Upload + let cid: string + + describe('Upload initiation', () => { + it('should create an upload', async () => { + upload = await UploadsUseCases.createFileUpload( + user, + filename, + mimeType, + null, + null, + null, + ) + + expect(upload).toMatchObject({ + id: expect.any(String), + name: filename, + mimeType, + oauthProvider: user.oauthProvider, + oauthUserId: user.oauthUserId, + fileTree: null, + status: UploadStatus.PENDING, + type: UploadType.FILE, + relativeId: null, + }) + }) + + it('should upload chunks correctly', async () => { + const CHUNK_SIZE = DEFAULT_MAX_CHUNK_SIZE + for (let i = 0; i < fileSize; i += CHUNK_SIZE) { + const uploadedChunkPromise = UploadsUseCases.uploadChunk( + user, + upload.id, + Math.floor(i / CHUNK_SIZE), + Buffer.from(rndBuffer.subarray(i, i + CHUNK_SIZE)), + ) + + await 
expect(uploadedChunkPromise).resolves.not.toThrow() + } + + const postUploadedChunks = await blockstoreRepository.getByType( + upload.id, + MetadataType.FileChunk, + ) + + const PENDING_CHUNK_DUE_TO_COMPLETION = 1 // The last chunk is pending due to the upload not being completed + const EXPECTED_CHUNKS = TOTAL_CHUNKS - PENDING_CHUNK_DUE_TO_COMPLETION + + expect(postUploadedChunks.length).toBe(EXPECTED_CHUNKS) + }) + }) + + describe('Upload completion', () => { + it('should be able to complete the upload and return the correct CID', async () => { + const blockstore = new MemoryBlockstore() + const expectedCID = await processFileToIPLDFormat( + blockstore, + [rndBuffer], + BigInt(fileSize), + filename, + ) + cid = await UploadsUseCases.completeUpload(user, upload.id) + + expect(cid).toBe(cidToString(expectedCID)) + }) + + it('should have generated expected number of chunks and file', async () => { + let postUploadedChunks = await blockstoreRepository.getByType( + upload.id, + MetadataType.FileChunk, + ) + if (postUploadedChunks.length === 0) { + postUploadedChunks = await blockstoreRepository.getByType( + upload.id, + MetadataType.File, + ) + } + expect(postUploadedChunks).toHaveLength(TOTAL_CHUNKS) + + const postUploadedFile = await blockstoreRepository.getByType( + upload.id, + MetadataType.File, + ) + expect(postUploadedFile).toHaveLength(1) + }) + + it('should have generated expected metadata', async () => { + const metadata = await ObjectUseCases.getMetadata(cid) + + expect(metadata).toMatchObject({ + type: 'file', + dataCid: cid, + name: filename, + totalSize: BigInt(fileSize).toString(), + totalChunks: TOTAL_CHUNKS, + chunks: expect.any(Array), + ...(mimeType ? { mimeType } : {}), + }) + }) + + it('should be in MIGRATING status', async () => { + const uploadEntry = await uploadsRepository.getUploadEntryById( + upload.id, + ) + expect(uploadEntry).not.toBeNull() + expect(uploadEntry!.status).toBe(UploadStatus.MIGRATING) + }) + + it('should be returned as pending to migrate', async () => { + const LIMIT = 1000 + const pendingMigrations = + await UploadsUseCases.getPendingMigrations(LIMIT) + expect(pendingMigrations).toHaveLength(1) + expect(pendingMigrations[0].id).toBe(upload.id) + }) + + it('should be able to process the migration', async () => { + await expect( + UploadsUseCases.processMigration(upload.id), + ).resolves.not.toThrow() + + const uploadEntry = await uploadsRepository.getUploadEntryById( + upload.id, + ) + expect(uploadEntry).not.toBeNull() + expect(uploadEntry!.status).toBe(UploadStatus.COMPLETED) + }) + }) + + describe('Downloading the file', () => { + it('should be able to retrieve the file', async () => { + const file = await FilesUseCases.downloadObject(user, cid) + const fileArray = await asyncIterableToPromiseOfArray(file) + const fileBuffer = Buffer.concat(fileArray) + expect(fileBuffer).toEqual(rndBuffer) + }) + + it('should have been added an interaction', async () => { + const { id } = await SubscriptionsUseCases.getSubscription(user) + const interactions = + await interactionsRepository.getInteractionsBySubscriptionIdAndTypeInTimeRange( + id, + InteractionType.Download, + new Date(0), + new Date(), + ) + + expect(interactions).toHaveLength(1) + }) + + it('download cache should be updated', async () => { + const asyncFromDatabase = await databaseDownloadCache.get(cid) + expect(asyncFromDatabase).not.toBeNull() + const fileArrayFromDatabase = await asyncIterableToPromiseOfArray( + asyncFromDatabase!, + ) + const fileBufferFromDatabase = 
Buffer.concat(fileArrayFromDatabase) + expect(fileBufferFromDatabase).toEqual(rndBuffer) + + const asyncFromMemory = memoryDownloadCache.get(cid) + expect(asyncFromMemory).not.toBeNull() + const fileArrayFromMemory = await asyncIterableToPromiseOfArray( + asyncFromMemory!, + ) + const fileBufferFromMemory = Buffer.concat(fileArrayFromMemory) + expect(fileBufferFromMemory).toEqual(rndBuffer) + }) + }) + + describe('Object Information', () => { + const PUBLISH_ON_BLOCK = 100 + + it('object information should be initialized', async () => { + const nodes = await nodesRepository.getNodesByHeadCid(cid) + const objectInformation = await ObjectUseCases.getObjectInformation(cid) + expect(objectInformation).not.toBeNull() + + expect(objectInformation?.cid).toBe(cid) + expect(objectInformation?.owners).toEqual([ + { + publicId: user.publicId, + role: OwnerRole.ADMIN, + }, + ]) + expect(objectInformation?.uploadStatus).toEqual({ + uploadedNodes: 0, + totalNodes: nodes.length, + archivedNodes: 0, + minimumBlockDepth: null, + maximumBlockDepth: null, + }) + }) + + it('object information should be updated on publishing', async () => { + // Mocking publishing onchain + const nodes = await nodesRepository.getNodesByHeadCid(cid) + const transactionResults = nodes.map((node) => + TransactionResultsUseCases.setTransactionResults(node.cid, { + success: true, + batchTxHash: '0x123', + status: TransactionStatus.CONFIRMED, + blockNumber: PUBLISH_ON_BLOCK, + }), + ) + await Promise.all(transactionResults) + // End of mocking + + const objectInformation = await ObjectUseCases.getObjectInformation(cid) + expect(objectInformation).not.toBeNull() + expect(objectInformation?.uploadStatus).toEqual({ + uploadedNodes: nodes.length, + totalNodes: nodes.length, + archivedNodes: 0, + minimumBlockDepth: PUBLISH_ON_BLOCK, + maximumBlockDepth: PUBLISH_ON_BLOCK, + }) + }) + + it('object information should be updated on archiving', async () => { + // Mocking archiving onchain + const nodes = await nodesRepository.getNodesByHeadCid(cid) + const transactionResults = nodes.map((node) => + nodesRepository.setNodeArchivingData({ + cid: node.cid, + pieceIndex: 1, + pieceOffset: 1, + }), + ) + await Promise.all(transactionResults) + // End of mocking + + const objectInformation = await ObjectUseCases.getObjectInformation(cid) + expect(objectInformation).not.toBeNull() + expect(objectInformation?.uploadStatus).toEqual({ + uploadedNodes: nodes.length, + totalNodes: nodes.length, + archivedNodes: nodes.length, + minimumBlockDepth: PUBLISH_ON_BLOCK, + maximumBlockDepth: PUBLISH_ON_BLOCK, + }) + }) + }) + }) +}) diff --git a/backend/__tests__/e2e/uploads/folder.spec.ts b/backend/__tests__/e2e/uploads/folder.spec.ts new file mode 100644 index 00000000..0d47fd0c --- /dev/null +++ b/backend/__tests__/e2e/uploads/folder.spec.ts @@ -0,0 +1,330 @@ +import { + cidToString, + processFileToIPLDFormat, + processFolderToIPLDFormat, + stringToCid, +} from '@autonomys/auto-dag-data' +import { + FolderTreeFolder, + OwnerRole, + TransactionStatus, +} from '../../../src/models/objects' +import { + FileUpload, + Upload, + UploadStatus, + UploadType, +} from '../../../src/models/uploads/upload' +import { User } from '../../../src/models/users' +import { + FilesUseCases, + ObjectUseCases, + TransactionResultsUseCases, + UsersUseCases, +} from '../../../src/useCases' +import { UploadsUseCases } from '../../../src/useCases/uploads/uploads' +import { dbMigration } from '../../utils/dbMigrate' +import { PreconditionError } from '../../utils/error' +import { 
MOCK_UNONBOARDED_USER } from '../../utils/mocks' +import { MemoryBlockstore } from 'blockstore-core' +import { uploadsRepository } from '../../../src/repositories/uploads/uploads' +import { nodesRepository } from '../../../src/repositories' +import { asyncIterableToPromiseOfArray } from '../../../src/utils/async' +import PizZip from 'pizzip' + +describe('Folder Upload', () => { + let user: User + let folderUpload: Upload + + beforeAll(async () => { + await dbMigration.up() + const result = await UsersUseCases.onboardUser(MOCK_UNONBOARDED_USER) + if (!result) throw new PreconditionError('Failed to setup test user') + user = result + }) + + afterAll(async () => { + await dbMigration.down() + }) + + const folderName = 'test' + const folderId = folderName + const subfileName = 'test.txt' + const subfileId = subfileName + const subfileMimeType = 'text/plain' + const subfileSize = 100 + const subfileBuffer = Buffer.from('t'.repeat(subfileSize)) + const subfolderName = 'test2' + + const fileTree: FolderTreeFolder = { + name: folderName, + type: 'folder', + children: [ + { + type: 'file', + name: subfileName, + id: subfileId, + }, + { + type: 'folder', + name: subfolderName, + id: subfolderName, + children: [], + }, + ], + id: folderId, + } + const totalChildren = fileTree.children.length + const totalNodes = totalChildren + 1 + + // Result of the subfile upload + let subfileCID: string + // Result of the folder upload + let folderCID: string + // Result of the subfolder upload + let subfolderCid: string + + describe('Folder upload initialization', () => { + it('should be able to create a folder upload', async () => { + folderUpload = await UploadsUseCases.createFolderUpload( + user, + folderName, + fileTree, + null, + ) + + expect(folderUpload).toMatchObject({ + id: expect.any(String), + rootId: expect.any(String), + relativeId: null, + type: UploadType.FOLDER, + status: UploadStatus.PENDING, + name: 'test', + fileTree, + }) + }) + }) + + describe('File within folder upload', () => { + let subfileUpload: FileUpload + it('should be able to create an file upload within a folder upload', async () => { + subfileUpload = await UploadsUseCases.createFileInFolder( + user, + folderUpload.id, + subfileId, + subfileName, + subfileMimeType, + ) + + expect(subfileUpload).toMatchObject({ + id: expect.any(String), + rootId: folderUpload.id, + relativeId: subfileId, + type: UploadType.FILE, + status: UploadStatus.PENDING, + name: subfileName, + mimeType: subfileMimeType, + }) + }) + + it('should be able to upload a file to a file upload', async () => { + await expect( + UploadsUseCases.uploadChunk(user, subfileUpload.id, 0, subfileBuffer), + ).resolves.not.toThrow() + }) + + it('should be able to complete subfile upload with matching CIDs', async () => { + const expectedCID = cidToString( + await processFileToIPLDFormat( + new MemoryBlockstore(), + [subfileBuffer], + BigInt(subfileSize), + subfileName, + ), + ) + + subfileCID = await UploadsUseCases.completeUpload(user, subfileUpload.id) + expect(subfileCID).toBe(expectedCID) + }) + + it('should status be updated to completed', async () => { + const upload = await uploadsRepository.getUploadEntryById( + subfileUpload.id, + ) + expect(upload).toBeTruthy() + expect(upload?.status).toBe(UploadStatus.MIGRATING) + }) + + it('should not be generated any metadata', async () => { + if (!subfileCID) throw new PreconditionError('Subfile CID not defined') + const metadata = await ObjectUseCases.getMetadata(subfileCID) + expect(metadata).toBeUndefined() + }) + }) + + 
describe('Folder upload', () => {
+    it('should be able to finalize folder upload', async () => {
+      subfolderCid = cidToString(
+        await processFolderToIPLDFormat(
+          new MemoryBlockstore(),
+          [],
+          subfolderName,
+          BigInt(0),
+        ),
+      )
+      const expectedCID = cidToString(
+        await processFolderToIPLDFormat(
+          new MemoryBlockstore(),
+          [stringToCid(subfileCID), stringToCid(subfolderCid)],
+          folderName,
+          BigInt(subfileSize),
+        ),
+      )
+
+      folderCID = await UploadsUseCases.completeUpload(user, folderUpload.id)
+      expect(folderCID).toBe(expectedCID)
+    })
+
+    it('should have generated metadata', async () => {
+      if (!folderCID) throw new PreconditionError('Folder CID not defined')
+      const metadata = await ObjectUseCases.getMetadata(folderCID)
+      expect(metadata).toMatchObject({
+        dataCid: folderCID,
+        type: 'folder',
+        totalSize: BigInt(subfileSize).toString(),
+        totalFiles: totalChildren,
+        children: [
+          {
+            type: 'file',
+            cid: subfileCID,
+            totalSize: BigInt(subfileSize).toString(),
+          },
+          {
+            type: 'folder',
+            cid: subfolderCid,
+            totalSize: BigInt(0).toString(),
+          },
+        ],
+        uploadOptions: {},
+      })
+    })
+
+    it('should be able to get file metadata', async () => {
+      if (!subfileCID) throw new PreconditionError('Subfile CID not defined')
+      const metadata = await ObjectUseCases.getMetadata(subfileCID)
+      expect(metadata).toMatchObject({
+        dataCid: subfileCID,
+        type: 'file',
+        totalSize: BigInt(subfileSize).toString(),
+      })
+    })
+
+    it('should have all uploads in MIGRATING status', async () => {
+      if (!subfileCID) throw new PreconditionError('Subfile CID not defined')
+      const subfileUpload = await uploadsRepository.getUploadsByRoot(
+        folderUpload.id,
+      )
+      expect(subfileUpload?.length).toBe(totalNodes)
+      expect(
+        subfileUpload?.every((e) => e.status === UploadStatus.MIGRATING),
+      ).toBe(true)
+
+      await new Promise((resolve) => setTimeout(resolve, 1000))
+    })
+
+    it('should migrate all uploads to COMPLETED status', async () => {
+      if (!folderCID) throw new PreconditionError('Folder CID not defined')
+
+      await expect(
+        UploadsUseCases.processMigration(folderUpload.id),
+      ).resolves.not.toThrow()
+
+      const uploads = await uploadsRepository.getUploadsByRoot(folderUpload.id)
+      expect(uploads?.length).toBe(totalNodes)
+      expect(uploads?.every((e) => e.status === UploadStatus.COMPLETED)).toBe(
+        true,
+      )
+    })
+
+    it('upload status should be updated on node publishing', async () => {
+      // Mocking node publishing
+      const blockNumber = 123
+      const nodes = await nodesRepository.getNodesByRootCid(folderCID)
+      const promises = nodes.map((e) => {
+        return TransactionResultsUseCases.setTransactionResults(e.cid, {
+          success: true,
+          batchTxHash: '0x123',
+          status: TransactionStatus.CONFIRMED,
+          blockNumber,
+          blockHash: '0x123',
+        })
+      })
+
+      await Promise.all(promises)
+      // End of mocking
+
+      const objectInformation =
+        await ObjectUseCases.getObjectInformation(folderCID)
+      expect(objectInformation).toMatchObject({
+        uploadStatus: {
+          totalNodes: totalNodes,
+          uploadedNodes: totalNodes,
+          archivedNodes: 0,
+          minimumBlockDepth: blockNumber,
+          maximumBlockDepth: blockNumber,
+        },
+      })
+    })
+
+    it('upload status should be updated when archived', async () => {
+      const nodes = await nodesRepository.getNodesByRootCid(folderCID)
+      const promises = nodes.map((e) => {
+        return nodesRepository.setNodeArchivingData({
+          cid: e.cid,
+          pieceIndex: 1,
+          pieceOffset: 1,
+        })
+      })
+
+      await Promise.all(promises)
+
+      const objectInformation =
+        await ObjectUseCases.getObjectInformation(folderCID)
+      expect(objectInformation).toMatchObject({
+        uploadStatus: {
totalNodes, + uploadedNodes: totalNodes, + archivedNodes: totalNodes, + minimumBlockDepth: expect.any(Number), + maximumBlockDepth: expect.any(Number), + }, + }) + }) + + it('should be able to get object summary', async () => { + const summary = await ObjectUseCases.getObjectSummaryByCID(folderCID) + expect(summary).toMatchObject({ + headCid: folderCID, + name: folderName, + type: 'folder', + size: BigInt(subfileSize).toString(), + owners: [ + { + role: OwnerRole.ADMIN, + publicId: user.publicId, + }, + ], + }) + }) + + it('should be able to download folder as zip', async () => { + const zip = await FilesUseCases.downloadObject(user, folderCID) + const zipArray = await asyncIterableToPromiseOfArray(zip) + const zipBuffer = Buffer.concat(zipArray) + expect(zipBuffer).toBeDefined() + expect(() => { + new PizZip(zipBuffer) + }).not.toThrow() + }) + }) +}) diff --git a/backend/__tests__/e2e/users/admin.spec.ts b/backend/__tests__/e2e/users/admin.spec.ts new file mode 100644 index 00000000..886a4191 --- /dev/null +++ b/backend/__tests__/e2e/users/admin.spec.ts @@ -0,0 +1,71 @@ +import { User, UserRole } from '../../../src/models/users' +import { usersRepository } from '../../../src/repositories' +import { SubscriptionsUseCases, UsersUseCases } from '../../../src/useCases' +import { dbMigration } from '../../utils/dbMigrate' +import { PreconditionError } from '../../utils/error' +import { createMockUser } from '../../utils/mocks' + +describe('Admin management', () => { + let admin: User + + beforeAll(async () => { + await dbMigration.up() + admin = await createMockUser() + await usersRepository + .updateRole(admin.oauthProvider, admin.oauthUserId, UserRole.Admin) + .catch(() => { + throw new PreconditionError('Failed to set admin role') + }) + }) + + afterAll(async () => { + await dbMigration.down() + }) + + it('should fail role update for non-admin users', async () => { + const user = await createMockUser() + + await expect( + UsersUseCases.updateRole(user, user, UserRole.Admin), + ).rejects.toThrow('User does not have admin privileges') + }) + + it('should successfully update role for admin users', async () => { + const user = await createMockUser() + await UsersUseCases.updateRole(admin, user, UserRole.User) + + const updatedUser = await usersRepository.getUserByOAuthInformation( + user.oauthProvider, + user.oauthUserId, + ) + expect(updatedUser?.role).toBe(UserRole.User) + }) + + it('should throw an error when trying to update role to an invalid role', async () => { + const user = await createMockUser() + + await expect( + UsersUseCases.updateRole(admin, user, 'INVALID_ROLE' as UserRole), + ).rejects.toThrow('Invalid role') + }) + + it('should throw an error when trying to update subscription for non-admin users', async () => { + const user = await createMockUser() + + await expect( + SubscriptionsUseCases.updateSubscription(user, user, 'monthly', 100, 100), + ).rejects.toThrow('User does not have admin privileges') + }) + + it('should successfully update subscription for admin users', async () => { + const user = await createMockUser() + + await SubscriptionsUseCases.updateSubscription( + admin, + user, + 'monthly', + 100, + 100, + ) + }) +}) diff --git a/backend/__tests__/e2e/users/apikey.spec.ts b/backend/__tests__/e2e/users/apikey.spec.ts new file mode 100644 index 00000000..9356847f --- /dev/null +++ b/backend/__tests__/e2e/users/apikey.spec.ts @@ -0,0 +1,68 @@ +import { ApiKey, User } from '../../../src/models/users' +import { ApiKeysUseCases } from 
'../../../src/useCases/users/apikeys' +import { UsersUseCases } from '../../../src/useCases/users/users' +import { PreconditionError } from '../../utils/error' +import { closeDatabase, getDatabase } from '../../../src/drivers/pg' +import { apiKeysRepository } from '../../../src/repositories' +import { ApiKeyAuth } from '../../../src/services/authManager/providers/apikey' +import { dbMigration } from '../../utils/dbMigrate' +import { MOCK_UNONBOARDED_USER } from '../../utils/mocks' + +describe('ApiKeyUseCases', () => { + let user: User + let apiKey: ApiKey + + beforeAll(async () => { + await getDatabase() + await dbMigration.up() + const result = await UsersUseCases.onboardUser(MOCK_UNONBOARDED_USER) + if (!result) { + throw new PreconditionError('User not initialized') + } + user = result + }) + + it('should create an api key', async () => { + apiKey = await ApiKeysUseCases.createApiKey(user) + expect(apiKey).toMatchObject({ + id: expect.any(String), + secret: expect.any(String), + oauthProvider: user.oauthProvider, + oauthUserId: user.oauthUserId, + }) + }) + + it('should be able to be authenticated', async () => { + const authenticatedUser = await ApiKeyAuth.getUserFromApiKey(apiKey.secret) + expect(authenticatedUser).toMatchObject({ + provider: user.oauthProvider, + id: user.oauthUserId, + }) + }) + + it('should be able to mark as deleted an api key', async () => { + await ApiKeysUseCases.deleteApiKey(user, apiKey.id) + + const deletedApiKey = await apiKeysRepository.getApiKeyBySecret( + apiKey.secret, + ) + expect(deletedApiKey?.deletedAt).not.toBeNull() + }) + + it('should not be able to authenticate with a deleted api key', async () => { + await expect(ApiKeyAuth.getUserFromApiKey(apiKey.secret)).rejects.toThrow( + 'Api key has been deleted', + ) + }) + + it('should not be able to authenticate with a non existent api key', async () => { + await expect( + ApiKeyAuth.getUserFromApiKey('non-existent-api-key'), + ).rejects.toThrow('Api key not found') + }) + + afterAll(async () => { + await closeDatabase() + await dbMigration.down() + }) +}) diff --git a/backend/__tests__/e2e/users/credits.spec.ts b/backend/__tests__/e2e/users/credits.spec.ts new file mode 100644 index 00000000..ce803a4d --- /dev/null +++ b/backend/__tests__/e2e/users/credits.spec.ts @@ -0,0 +1,70 @@ +import { InteractionType } from '../../../src/models/objects/interactions' +import { UsersUseCases } from '../../../src/useCases/users/users' +import { PreconditionError } from '../../utils/error' +import { closeDatabase, getDatabase } from '../../../src/drivers/pg' +import { User } from '../../../src/models/users' +import { MOCK_UNONBOARDED_USER } from '../../utils/mocks' +import { dbMigration } from '../../utils/dbMigrate' + +describe('CreditsUseCases', () => { + let user: User + + beforeAll(async () => { + await getDatabase() + await dbMigration.up() + const result = await UsersUseCases.onboardUser(MOCK_UNONBOARDED_USER) + if (!result) throw new PreconditionError('Failed to setup test user') + user = result + }) + + afterAll(async () => { + await closeDatabase() + await dbMigration.down() + }) + + it('should create credits for a user', async () => { + const interactionType = InteractionType.Upload + const size = BigInt(1024) + + const initialCredits = await UsersUseCases.getPendingCreditsByUserAndType( + user.publicId, + interactionType, + ) + + await UsersUseCases.registerInteraction( + user.publicId, + interactionType, + size, + ) + + const pendingCredits = await UsersUseCases.getPendingCreditsByUserAndType( + 
user.publicId, + interactionType, + ) + + expect(initialCredits - pendingCredits).toEqual(Number(size)) + }) + + it('should create credits for a user on download', async () => { + const interactionType = InteractionType.Download + const size = BigInt(2048) + + const initialCredits = await UsersUseCases.getPendingCreditsByUserAndType( + user.publicId, + interactionType, + ) + + await UsersUseCases.registerInteraction( + user.publicId, + interactionType, + size, + ) + + const pendingCredits = await UsersUseCases.getPendingCreditsByUserAndType( + user.publicId, + interactionType, + ) + + expect(initialCredits - pendingCredits).toEqual(Number(size)) + }) +}) diff --git a/backend/__tests__/e2e/users/user.spec.ts b/backend/__tests__/e2e/users/user.spec.ts new file mode 100644 index 00000000..7a305ed4 --- /dev/null +++ b/backend/__tests__/e2e/users/user.spec.ts @@ -0,0 +1,124 @@ +import { UsersUseCases } from '../../../src/useCases/users/users.js' +import { UnonboardedUser, UserRole } from '../../../src/models/users/user.js' +import { OrganizationsUseCases } from '../../../src/useCases/users/organizations.js' +import { SubscriptionsUseCases } from '../../../src/useCases/users/subscriptions.js' +import { + subscriptionsRepository, + usersRepository, +} from '../../../src/repositories/index.js' +import { closeDatabase, getDatabase } from '../../../src/drivers/pg.js' +import { dbMigration } from '../../utils/dbMigrate.js' +import { createMockUser, MOCK_UNONBOARDED_USER } from '../../utils/mocks.js' + +describe('UsersUseCases', () => { + beforeAll(async () => { + await getDatabase() + await dbMigration.up() + }) + + afterAll(async () => { + await closeDatabase() + await dbMigration.down() + }) + + it('should return unonboarded user info', async () => { + const nonOnboardedUser: UnonboardedUser = { + oauthProvider: 'google', + oauthUserId: 'non-onboarded-user', + role: UserRole.User, + publicId: null, + onboarded: false, + } + + await expect( + UsersUseCases.getUserInfo(nonOnboardedUser), + ).resolves.toMatchObject({ + user: nonOnboardedUser, + }) + }) + + it('should get user info for an onboarded user', async () => { + const user = await createMockUser() + + const userInfo = await UsersUseCases.getUserInfo(user) + expect(userInfo.user).toMatchObject({ + oauthProvider: user.oauthProvider, + oauthUserId: user.oauthUserId, + role: UserRole.User, + }) + }) + + it('should onboard a user', async () => { + const user = await UsersUseCases.onboardUser(MOCK_UNONBOARDED_USER) + + if (!user) { + expect(user).toBeTruthy() + return + } + + expect(user.onboarded).toBe(true) + expect(user.publicId).not.toBeNull() + expect(user.role).toBe(UserRole.User) + expect(user.oauthProvider).toBe(MOCK_UNONBOARDED_USER.oauthProvider) + expect(user.oauthUserId).toBe(MOCK_UNONBOARDED_USER.oauthUserId) + + const userByPublicId = await UsersUseCases.getUserByPublicId(user.publicId!) 
+
+    expect(userByPublicId).toEqual(user)
+  })
+
+  it('user should have a subscription', async () => {
+    const user = await UsersUseCases.getUserByOAuthUser({
+      provider: MOCK_UNONBOARDED_USER.oauthProvider,
+      id: MOCK_UNONBOARDED_USER.oauthUserId,
+    })
+
+    expect(user).toBeTruthy()
+
+    const promise = SubscriptionsUseCases.getSubscription(user!.publicId)
+
+    await expect(promise).resolves.toEqual(
+      expect.objectContaining({
+        id: expect.any(String),
+        organizationId: expect.any(String),
+        uploadLimit: expect.any(Number),
+        downloadLimit: expect.any(Number),
+        granularity: expect.any(String),
+      }),
+    )
+  })
+
+  it('user should have an organization linked to a subscription', async () => {
+    const user = await UsersUseCases.getUserByOAuthUser({
+      provider: MOCK_UNONBOARDED_USER.oauthProvider,
+      id: MOCK_UNONBOARDED_USER.oauthUserId,
+    })
+
+    const promise = OrganizationsUseCases.getOrganizationByUser(user)
+    await expect(promise).resolves.toBeTruthy()
+
+    const organization = await promise
+    expect(organization).toEqual({
+      id: expect.any(String),
+      name: expect.any(String),
+    })
+
+    const subscription = await subscriptionsRepository.getByOrganizationId(
+      organization.id,
+    )
+
+    expect(subscription).toBeTruthy()
+  })
+
+  it('should be able to get user list', async () => {
+    const user = await createMockUser()
+    await usersRepository.updateRole(
+      user.oauthProvider,
+      user.oauthUserId,
+      UserRole.Admin,
+    )
+
+    const users = await UsersUseCases.getUserList(user)
+    expect(users).toBeInstanceOf(Array)
+  })
+})
diff --git a/backend/__tests__/utils/dbMigrate.ts b/backend/__tests__/utils/dbMigrate.ts
new file mode 100644
index 00000000..29d278de
--- /dev/null
+++ b/backend/__tests__/utils/dbMigrate.ts
@@ -0,0 +1,21 @@
+import { closeDatabase, getDatabase } from '../../src/drivers/pg'
+import dbMigrate from 'db-migrate'
+
+let dbMigrateInstance: ReturnType<typeof dbMigrate.getInstance>
+
+const up = async () => {
+  await getDatabase()
+  dbMigrateInstance = dbMigrate.getInstance(true)
+  dbMigrateInstance.silence(true)
+  await dbMigrateInstance.up()
+}
+
+const down = async () => {
+  await closeDatabase()
+  await dbMigrateInstance.down()
+}
+
+export const dbMigration = {
+  up,
+  down,
+}
diff --git a/backend/__tests__/utils/error.ts b/backend/__tests__/utils/error.ts
new file mode 100644
index 00000000..c5d259ab
--- /dev/null
+++ b/backend/__tests__/utils/error.ts
@@ -0,0 +1 @@
+export class PreconditionError extends Error {}
diff --git a/backend/__tests__/utils/mocks.ts b/backend/__tests__/utils/mocks.ts
new file mode 100644
index 00000000..7b98afcc
--- /dev/null
+++ b/backend/__tests__/utils/mocks.ts
@@ -0,0 +1,29 @@
+import { UnonboardedUser, User, UserRole } from '../../src/models/users'
+import { faker } from '@faker-js/faker'
+import { UsersUseCases } from '../../src/useCases'
+import { PreconditionError } from './error'
+
+export const MOCK_UNONBOARDED_USER: UnonboardedUser = {
+  oauthProvider: 'google',
+  oauthUserId: '123',
+  role: UserRole.User,
+  publicId: null,
+  onboarded: false,
+}
+
+export const createMockUser = async (): Promise<User> => {
+  const user: UnonboardedUser = {
+    oauthProvider: 'google',
+    oauthUserId: faker.string.uuid(),
+    role: UserRole.Admin,
+    publicId: null,
+    onboarded: false,
+  }
+
+  const onboardUser = await UsersUseCases.onboardUser(user)
+  if (!onboardUser) {
+    throw new PreconditionError('Failed to onboard user')
+  }
+
+  return onboardUser
+}
diff --git a/backend/__tests__/utils/uploads.ts b/backend/__tests__/utils/uploads.ts
new file mode 100644
index 00000000..616fb0c5
--- /dev/null
+++
b/backend/__tests__/utils/uploads.ts @@ -0,0 +1,35 @@ +import { User } from '../../src/models/users/user.js' +import { UploadsUseCases } from '../../src/useCases/uploads/uploads.js' +import { PreconditionError } from './error.js' + +export const uploadFile = async ( + user: User, + name: string, + content: Buffer | string, + mimeType: string, +) => { + const upload = await UploadsUseCases.createFileUpload( + user, + name, + mimeType, + null, + ) + + if (!upload) { + throw new PreconditionError('Failed to create upload') + } + + content = typeof content === 'string' ? Buffer.from(content) : content + + await UploadsUseCases.uploadChunk(user, upload.id, 0, content).catch(() => { + throw new PreconditionError('Failed to upload chunk') + }) + + const cid = await UploadsUseCases.completeUpload(user, upload.id).catch( + () => { + throw new PreconditionError('Failed to complete upload') + }, + ) + + return cid +} diff --git a/backend/global-setup.ts b/backend/global-setup.ts new file mode 100644 index 00000000..94aaf371 --- /dev/null +++ b/backend/global-setup.ts @@ -0,0 +1,8 @@ +import { PostgreSqlContainer } from '@testcontainers/postgresql' + +export default async () => { + const container = new PostgreSqlContainer().withExposedPorts(54320) + const service = await container.start() + process.env.DATABASE_URL = service.getConnectionUri() + global.__POSTGRES_CONTAINER__ = service +} diff --git a/backend/global-teardown.ts b/backend/global-teardown.ts new file mode 100644 index 00000000..9f1bac8a --- /dev/null +++ b/backend/global-teardown.ts @@ -0,0 +1,7 @@ +export default async () => { + if (global.__POSTGRES_CONTAINER__) { + await global.__POSTGRES_CONTAINER__.stop() + } else { + throw new Error('No database container found') + } +} diff --git a/backend/jest.config.ts b/backend/jest.config.ts index 080fb305..6d1b7b97 100644 --- a/backend/jest.config.ts +++ b/backend/jest.config.ts @@ -1,9 +1,26 @@ +const { createDefaultEsmPreset } = require('ts-jest') + module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - roots: ['/src'], - testMatch: ['**/__tests__/**/*.ts', '**/?(*.)+(spec|test).ts'], + ...createDefaultEsmPreset(), + globalSetup: './global-setup.ts', + globalTeardown: './global-teardown.ts', + testMatch: ['**/__tests__/**/*.spec.ts'], + extensionsToTreatAsEsm: ['.ts'], + moduleNameMapper: { + '^(\\.{1,2}/.*)\\.js$': '$1', + }, + coveragePathIgnorePatterns: [ + './__tests__/utils/', + './node_modules/', + './migrations/', + ], transform: { - '^.+\\.ts$': 'ts-jest', + '^.+\\.tsx?$': [ + 'ts-jest', + { + useESM: true, + tsconfig: 'tsconfig.test.json', + }, + ], }, } diff --git a/backend/migrations/20241002111606-init-schema.js b/backend/migrations/20241002111606-init-schema.js index 3cecd96c..3cd423aa 100644 --- a/backend/migrations/20241002111606-init-schema.js +++ b/backend/migrations/20241002111606-init-schema.js @@ -32,7 +32,6 @@ exports.up = function (db) { }, function (err, data) { if (err) return reject(err) - console.log('received data: ' + data) resolve(data) }, @@ -56,7 +55,6 @@ exports.down = function (db) { }, function (err, data) { if (err) return reject(err) - console.log('received data: ' + data) resolve(data) }, diff --git a/backend/migrations/20241016181902-credit-management.js b/backend/migrations/20241016181902-credit-management.js index faa676b4..154280bb 100644 --- a/backend/migrations/20241016181902-credit-management.js +++ b/backend/migrations/20241016181902-credit-management.js @@ -32,7 +32,6 @@ exports.up = function (db) { }, function (err, data) { if 
(err) return reject(err) - console.log('received data: ' + data) resolve(data) }, @@ -56,7 +55,6 @@ exports.down = function (db) { }, function (err, data) { if (err) return reject(err) - console.log('received data: ' + data) resolve(data) }, diff --git a/backend/migrations/20241016182736-add-observality.js b/backend/migrations/20241016182736-add-observality.js index 7d67c2df..9656b6bc 100644 --- a/backend/migrations/20241016182736-add-observality.js +++ b/backend/migrations/20241016182736-add-observality.js @@ -32,7 +32,6 @@ exports.up = function (db) { }, function (err, data) { if (err) return reject(err) - console.log('received data: ' + data) resolve(data) }, @@ -56,7 +55,6 @@ exports.down = function (db) { }, function (err, data) { if (err) return reject(err) - console.log('received data: ' + data) resolve(data) }, diff --git a/backend/migrations/20241023223901-upload-schema.js b/backend/migrations/20241023223901-upload-schema.js index c63262a8..0bc3a6c0 100644 --- a/backend/migrations/20241023223901-upload-schema.js +++ b/backend/migrations/20241023223901-upload-schema.js @@ -32,7 +32,6 @@ exports.up = function (db) { }, function (err, data) { if (err) return reject(err) - console.log('received data: ' + data) resolve(data) }, @@ -56,7 +55,6 @@ exports.down = function (db) { }, function (err, data) { if (err) return reject(err) - console.log('received data: ' + data) resolve(data) }, diff --git a/backend/migrations/20241025210151-update-metadata-pk.js b/backend/migrations/20241025210151-update-metadata-pk.js index 2799ee6f..c764bb67 100644 --- a/backend/migrations/20241025210151-update-metadata-pk.js +++ b/backend/migrations/20241025210151-update-metadata-pk.js @@ -32,7 +32,6 @@ exports.up = function (db) { }, function (err, data) { if (err) return reject(err) - console.log('received data: ' + data) resolve(data) }, @@ -56,7 +55,6 @@ exports.down = function (db) { }, function (err, data) { if (err) return reject(err) - console.log('received data: ' + data) resolve(data) }, diff --git a/backend/migrations/20241028193217-upload-options.js b/backend/migrations/20241028193217-upload-options.js index 0e9b2cd1..5b0b4aef 100644 --- a/backend/migrations/20241028193217-upload-options.js +++ b/backend/migrations/20241028193217-upload-options.js @@ -32,7 +32,6 @@ exports.up = function (db) { }, function (err, data) { if (err) return reject(err) - console.log('received data: ' + data) resolve(data) }, @@ -56,7 +55,6 @@ exports.down = function (db) { }, function (err, data) { if (err) return reject(err) - console.log('received data: ' + data) resolve(data) }, diff --git a/backend/migrations/20241029204914-remove-public-handle.js b/backend/migrations/20241029204914-remove-public-handle.js index 305ad465..77222f07 100644 --- a/backend/migrations/20241029204914-remove-public-handle.js +++ b/backend/migrations/20241029204914-remove-public-handle.js @@ -32,7 +32,6 @@ exports.up = function (db) { }, function (err, data) { if (err) return reject(err) - console.log('received data: ' + data) resolve(data) }, @@ -56,7 +55,6 @@ exports.down = function (db) { }, function (err, data) { if (err) return reject(err) - console.log('received data: ' + data) resolve(data) }, diff --git a/backend/migrations/20241115171659-add-node-archiving-data.js b/backend/migrations/20241115171659-add-node-archiving-data.js index 5bff72d6..f9f61b8c 100644 --- a/backend/migrations/20241115171659-add-node-archiving-data.js +++ b/backend/migrations/20241115171659-add-node-archiving-data.js @@ -1,53 +1,55 @@ -'use strict'; +'use 
strict' -var dbm; -var type; -var seed; -var fs = require('fs'); -var path = require('path'); -var Promise; +var dbm +var type +var seed +var fs = require('fs') +var path = require('path') +var Promise /** - * We receive the dbmigrate dependency from dbmigrate initially. - * This enables us to not have to rely on NODE_PATH. - */ -exports.setup = function(options, seedLink) { - dbm = options.dbmigrate; - type = dbm.dataType; - seed = seedLink; - Promise = options.Promise; -}; + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. + */ +exports.setup = function (options, seedLink) { + dbm = options.dbmigrate + type = dbm.dataType + seed = seedLink + Promise = options.Promise +} -exports.up = function(db) { - var filePath = path.join(__dirname, 'sqls', '20241115171659-add-node-archiving-data-up.sql'); - return new Promise( function( resolve, reject ) { - fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){ - if (err) return reject(err); - console.log('received data: ' + data); - - resolve(data); - }); +exports.up = function (db) { + var filePath = path.join( + __dirname, + 'sqls', + '20241115171659-add-node-archiving-data-up.sql', + ) + return new Promise(function (resolve, reject) { + fs.readFile(filePath, { encoding: 'utf-8' }, function (err, data) { + if (err) return reject(err) + resolve(data) + }) + }).then(function (data) { + return db.runSql(data) }) - .then(function(data) { - return db.runSql(data); - }); -}; - -exports.down = function(db) { - var filePath = path.join(__dirname, 'sqls', '20241115171659-add-node-archiving-data-down.sql'); - return new Promise( function( resolve, reject ) { - fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){ - if (err) return reject(err); - console.log('received data: ' + data); +} - resolve(data); - }); +exports.down = function (db) { + var filePath = path.join( + __dirname, + 'sqls', + '20241115171659-add-node-archiving-data-down.sql', + ) + return new Promise(function (resolve, reject) { + fs.readFile(filePath, { encoding: 'utf-8' }, function (err, data) { + if (err) return reject(err) + resolve(data) + }) + }).then(function (data) { + return db.runSql(data) }) - .then(function(data) { - return db.runSql(data); - }); -}; +} exports._meta = { - "version": 1 -}; + version: 1, +} diff --git a/backend/migrations/20241118155150-cold-file-cache.js b/backend/migrations/20241118155150-cold-file-cache.js index 9312f7aa..2c171d37 100644 --- a/backend/migrations/20241118155150-cold-file-cache.js +++ b/backend/migrations/20241118155150-cold-file-cache.js @@ -1,53 +1,55 @@ -'use strict'; +'use strict' -var dbm; -var type; -var seed; -var fs = require('fs'); -var path = require('path'); -var Promise; +var dbm +var type +var seed +var fs = require('fs') +var path = require('path') +var Promise /** - * We receive the dbmigrate dependency from dbmigrate initially. - * This enables us to not have to rely on NODE_PATH. - */ -exports.setup = function(options, seedLink) { - dbm = options.dbmigrate; - type = dbm.dataType; - seed = seedLink; - Promise = options.Promise; -}; + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. 
+ */ +exports.setup = function (options, seedLink) { + dbm = options.dbmigrate + type = dbm.dataType + seed = seedLink + Promise = options.Promise +} -exports.up = function(db) { - var filePath = path.join(__dirname, 'sqls', '20241118155150-cold-file-cache-up.sql'); - return new Promise( function( resolve, reject ) { - fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){ - if (err) return reject(err); - console.log('received data: ' + data); - - resolve(data); - }); +exports.up = function (db) { + var filePath = path.join( + __dirname, + 'sqls', + '20241118155150-cold-file-cache-up.sql', + ) + return new Promise(function (resolve, reject) { + fs.readFile(filePath, { encoding: 'utf-8' }, function (err, data) { + if (err) return reject(err) + resolve(data) + }) + }).then(function (data) { + return db.runSql(data) }) - .then(function(data) { - return db.runSql(data); - }); -}; - -exports.down = function(db) { - var filePath = path.join(__dirname, 'sqls', '20241118155150-cold-file-cache-down.sql'); - return new Promise( function( resolve, reject ) { - fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){ - if (err) return reject(err); - console.log('received data: ' + data); +} - resolve(data); - }); +exports.down = function (db) { + var filePath = path.join( + __dirname, + 'sqls', + '20241118155150-cold-file-cache-down.sql', + ) + return new Promise(function (resolve, reject) { + fs.readFile(filePath, { encoding: 'utf-8' }, function (err, data) { + if (err) return reject(err) + resolve(data) + }) + }).then(function (data) { + return db.runSql(data) }) - .then(function(data) { - return db.runSql(data); - }); -}; +} exports._meta = { - "version": 1 -}; + version: 1, +} diff --git a/backend/package.json b/backend/package.json index 382dfcf8..5e89e8e2 100644 --- a/backend/package.json +++ b/backend/package.json @@ -8,7 +8,7 @@ "build": "tsc", "start:server": "node --loader ts-node/esm src/server.ts", "start": "node --loader ts-node/esm", - "test": "jest", + "test": "node --experimental-vm-modules node_modules/jest/bin/jest.js --runInBand --coverage", "lint": "eslint . 
" }, "dependencies": { @@ -35,6 +35,10 @@ "zod": "^3.23.8" }, "devDependencies": { + "@faker-js/faker": "^9.2.0", + "@swc/core": "^1.9.2", + "@swc/jest": "^0.2.37", + "@testcontainers/postgresql": "^10.14.0", "@types/cors": "^2.8.17", "@types/jest": "^29.5.12", "@types/multer": "^1", @@ -46,6 +50,7 @@ "@typescript-eslint/eslint-plugin": "^8.13.0", "@typescript-eslint/parser": "^8.13.0", "blockstore-core": "^5.0.2", + "db-migrate-plugin-typescript": "^2.0.0", "eslint": "^8.57.1", "eslint-config-prettier": "^9.1.0", "eslint-plugin-eslint-plugin": "^6.3.1", @@ -54,7 +59,7 @@ "interface-store": "^6.0.2", "jest": "^29.7.0", "prettier": "^3.3.3", - "ts-jest": "^29.2.4", + "ts-jest": "^29.2.5", "ts-node": "^10.9.2", "typescript": "^5.6.3" } diff --git a/backend/src/drivers/pg.ts b/backend/src/drivers/pg.ts index f556a7ce..e4f07673 100644 --- a/backend/src/drivers/pg.ts +++ b/backend/src/drivers/pg.ts @@ -1,6 +1,6 @@ import pg from 'pg' -let db: pg.Client +let db: pg.Client | undefined const createDB = async (): Promise => { const client = new pg.Client({ @@ -19,3 +19,10 @@ export const getDatabase = async () => { return db } + +export const closeDatabase = async () => { + if (db) { + await db.end() + db = undefined + } +} diff --git a/backend/src/repositories/objects/nodes.ts b/backend/src/repositories/objects/nodes.ts index 7ac734fd..e76cd431 100644 --- a/backend/src/repositories/objects/nodes.ts +++ b/backend/src/repositories/objects/nodes.ts @@ -53,6 +53,28 @@ const getNode = async (cid: string) => { .then((e) => (e.rows.length > 0 ? e.rows[0] : undefined)) } +const getNodesByHeadCid = async (headCid: string) => { + const db = await getDatabase() + + return db + .query({ + text: 'SELECT * FROM nodes WHERE head_cid = $1', + values: [headCid], + }) + .then((e) => e.rows) +} + +const getNodesByRootCid = async (rootCid: string) => { + const db = await getDatabase() + + return db + .query({ + text: 'SELECT * FROM nodes WHERE root_cid = $1', + values: [rootCid], + }) + .then((e) => e.rows) +} + const getNodeCount = async ({ type, cid, @@ -143,4 +165,6 @@ export const nodesRepository = { saveNodes, getArchivingNodesCID, setNodeArchivingData, + getNodesByHeadCid, + getNodesByRootCid, } diff --git a/backend/src/repositories/objects/transactionResults.ts b/backend/src/repositories/objects/transactionResults.ts index adbc2ec0..08bd77c7 100644 --- a/backend/src/repositories/objects/transactionResults.ts +++ b/backend/src/repositories/objects/transactionResults.ts @@ -35,18 +35,6 @@ const getTransactionResult = async (cid: string) => { return result } -const getHeadTransactionResults = async (head_cid: string) => { - const db = await getDatabase() - const result = await db - .query({ - text: 'SELECT * FROM transaction_results WHERE head_cid = $1', - values: [head_cid], - }) - .then(({ rows }) => rows) - - return result -} - const getPendingUploads = async (limit: number = 100) => { const db = await getDatabase() const result = await db @@ -92,7 +80,6 @@ export const transactionResultsRepository = { storeTransactionResult, getTransactionResult, getPendingUploads, - getHeadTransactionResults, getUploadedNodesByRootCid, getFirstNotArchivedNode, } diff --git a/backend/src/repositories/users/apikeys.ts b/backend/src/repositories/users/apikeys.ts index 99c67d11..82679ee8 100644 --- a/backend/src/repositories/users/apikeys.ts +++ b/backend/src/repositories/users/apikeys.ts @@ -61,7 +61,6 @@ const deleteApiKey = async (id: string): Promise => { 'UPDATE api_keys SET deleted_at = $1 WHERE id = $2', [new Date(), id], 
) - console.log('API key matched with: ', result.rowCount) if (result.rowCount === 0) { throw new Error('API key not found') } diff --git a/backend/src/services/onchainPublisher/index.ts b/backend/src/services/onchainPublisher/index.ts index 38694da8..e7b16cd2 100644 --- a/backend/src/services/onchainPublisher/index.ts +++ b/backend/src/services/onchainPublisher/index.ts @@ -27,6 +27,7 @@ const processPendingUploads = safeCallback(async () => { const transactions = pendingUploads.map((upload) => { const buffer = Buffer.from(upload.encoded_node, 'base64') + return { module: 'system', method: 'remark', diff --git a/backend/src/useCases/objects/object.ts b/backend/src/useCases/objects/object.ts index 41308532..d65bb9e3 100644 --- a/backend/src/useCases/objects/object.ts +++ b/backend/src/useCases/objects/object.ts @@ -109,12 +109,6 @@ const searchByCIDOrName = async ( })) } -const getAllMetadata = async () => { - return metadataRepository - .getAllMetadata() - .then((metadata) => metadata.map((entry) => entry.metadata)) -} - const getRootObjects = async ( filter: | { @@ -281,11 +275,11 @@ export const ObjectUseCases = { searchMetadataByCID, searchMetadataByName, searchByCIDOrName, - getAllMetadata, getRootObjects, getSharedRoots, shareObject, getMarkedAsDeletedRoots, markAsDeleted, restoreObject, + getObjectSummaryByCID, } diff --git a/backend/src/useCases/objects/transactionResults.ts b/backend/src/useCases/objects/transactionResults.ts index 9ebec93a..4759cdd1 100644 --- a/backend/src/useCases/objects/transactionResults.ts +++ b/backend/src/useCases/objects/transactionResults.ts @@ -11,14 +11,6 @@ const getNodeTransactionResult = async (cid: CID | string) => { .then((result) => (result ? result.transaction_result : undefined)) } -const getHeadTransactionResults = async (cid: CID | string) => { - const cidString = typeof cid === 'string' ? 
cid : cidToString(cid) - - return transactionResultsRepository - .getHeadTransactionResults(cidString) - .then((rows) => rows.map((_) => _.transaction_result)) -} - const setTransactionResults = async ( cid: CID | string, transactionResults: TransactionResult, @@ -37,7 +29,6 @@ const getPendingTransactionResults = async (limit: number = 100) => { export const TransactionResultsUseCases = { getNodeTransactionResult, - getHeadTransactionResults, setTransactionResults, getPendingTransactionResults, } diff --git a/backend/src/useCases/uploads/uploads.ts b/backend/src/useCases/uploads/uploads.ts index 56637f63..ca7ef8e0 100644 --- a/backend/src/useCases/uploads/uploads.ts +++ b/backend/src/useCases/uploads/uploads.ts @@ -115,7 +115,7 @@ const createFileInFolder = async ( relativeId: string, name: string, mimeType: string | null, - uploadOptions: FileUploadOptions | null, + uploadOptions: FileUploadOptions | null = null, ): Promise => { const upload = await uploadsRepository.getUploadEntryById(uploadId) if (!upload) { @@ -217,7 +217,7 @@ const createSubFolderUpload = async ( const upload = await uploadsRepository.createUploadEntry( v4(), UploadType.FOLDER, - UploadStatus.PENDING, + UploadStatus.MIGRATING, fileTree.name, fileTree, null, diff --git a/backend/src/useCases/users/organizations.ts b/backend/src/useCases/users/organizations.ts index 2aeeb14f..50f32b5a 100644 --- a/backend/src/useCases/users/organizations.ts +++ b/backend/src/useCases/users/organizations.ts @@ -30,7 +30,7 @@ const getOrganizationByUser = async (user: User): Promise => { throw new Error('Organization not found') } - return organization + return { id: organization.id, name: organization.name } } const initOrganization = async (user: User) => { diff --git a/backend/src/useCases/users/subscriptions.ts b/backend/src/useCases/users/subscriptions.ts index f7fce978..7fd24113 100644 --- a/backend/src/useCases/users/subscriptions.ts +++ b/backend/src/useCases/users/subscriptions.ts @@ -20,7 +20,7 @@ const updateSubscription = async ( ): Promise => { const isAdmin = await UsersUseCases.isAdminUser(executor) if (!isAdmin) { - throw new Error('User is not an admin') + throw new Error('User does not have admin privileges') } const user = await UsersUseCases.resolveUser(userOrPublicId) diff --git a/backend/src/useCases/users/users.ts b/backend/src/useCases/users/users.ts index dd2f199d..ffade2f8 100644 --- a/backend/src/useCases/users/users.ts +++ b/backend/src/useCases/users/users.ts @@ -121,6 +121,9 @@ const updateRole = async ( if (!isAdmin) { throw new Error('User does not have admin privileges') } + if (!Object.values(UserRole).includes(role)) { + throw new Error('Invalid role') + } const user = await resolveUser(userOrPublicId) diff --git a/backend/tsconfig.json b/backend/tsconfig.json index 4d1221d4..155f0870 100644 --- a/backend/tsconfig.json +++ b/backend/tsconfig.json @@ -15,6 +15,6 @@ "strict": true, "allowJs": true }, - "include": ["src/**/*"], + "include": ["src/**/*", "types/**/*"], "exclude": ["node_modules", "dist"] } diff --git a/backend/tsconfig.test.json b/backend/tsconfig.test.json new file mode 100644 index 00000000..742dfbab --- /dev/null +++ b/backend/tsconfig.test.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "types": ["jest", "node"], + "rootDir": "." 
+ } +} diff --git a/backend/types/db-migrate.d.ts b/backend/types/db-migrate.d.ts new file mode 100644 index 00000000..bf9aa621 --- /dev/null +++ b/backend/types/db-migrate.d.ts @@ -0,0 +1,22 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +declare module 'db-migrate' { + interface Options { + cwd?: string // Current working directory + noPlugins?: boolean // Flag to disable plugins + plugins?: Record // Custom plugins + [key: string]: any // Additional options + } + + /** + * Gets an instance of the module. + * @param isModule - Indicates if the instance is a module. + * @param options - Configuration options. + * @param callback - Optional callback function. + * @returns A new instance of the module. + */ + export function getInstance( + isModule: boolean, + options?: Options, + callback?: (...args: any[]) => void, + ): any +} diff --git a/backend/types/global.d.ts b/backend/types/global.d.ts new file mode 100644 index 00000000..e44455a3 --- /dev/null +++ b/backend/types/global.d.ts @@ -0,0 +1,8 @@ +import type { StartedPostgreSqlContainer } from '@testcontainers/postgresql' + +declare global { + // eslint-disable-next-line no-var + var __POSTGRES_CONTAINER__: StartedPostgreSqlContainer +} + +export {} diff --git a/backend/yarn.lock b/backend/yarn.lock index 30b9c110..cf20f005 100644 --- a/backend/yarn.lock +++ b/backend/yarn.lock @@ -424,6 +424,13 @@ __metadata: languageName: node linkType: hard +"@balena/dockerignore@npm:^1.0.2": + version: 1.0.2 + resolution: "@balena/dockerignore@npm:1.0.2" + checksum: 10c0/0bcb067e86f6734ab943ce4ce9a7c8611f2e983a70bccebf9d2309db57695c09dded7faf5be49c929c4c9e9a9174ae55fc625626de0fb9958823c37423d12f4e + languageName: node + linkType: hard + "@bcoe/v8-coverage@npm:^0.2.3": version: 0.2.3 resolution: "@bcoe/v8-coverage@npm:0.2.3" @@ -505,6 +512,20 @@ __metadata: languageName: node linkType: hard +"@faker-js/faker@npm:^9.2.0": + version: 9.2.0 + resolution: "@faker-js/faker@npm:9.2.0" + checksum: 10c0/d711a5d206558f90e3ce9ecafe366e236fbe190b4df9d3968b512ccb87ec625843c919d16050beade88b790ed3df6332f6a837e41fba6de33e7a2f8daa67f08d + languageName: node + linkType: hard + +"@fastify/busboy@npm:^2.0.0": + version: 2.1.1 + resolution: "@fastify/busboy@npm:2.1.1" + checksum: 10c0/6f8027a8cba7f8f7b736718b013f5a38c0476eea67034c94a0d3c375e2b114366ad4419e6a6fa7ffc2ef9c6d3e0435d76dd584a7a1cbac23962fda7650b579e3 + languageName: node + linkType: hard + "@humanwhocodes/config-array@npm:^0.13.0": version: 0.13.0 resolution: "@humanwhocodes/config-array@npm:0.13.0" @@ -628,6 +649,15 @@ __metadata: languageName: node linkType: hard +"@jest/create-cache-key-function@npm:^29.7.0": + version: 29.7.0 + resolution: "@jest/create-cache-key-function@npm:29.7.0" + dependencies: + "@jest/types": "npm:^29.6.3" + checksum: 10c0/5c47ef62205264adf77b1ff26b969ce9fe84920b8275c3c5e83f4236859d6ae5e4e7027af99eef04a8e334c4e424d44af3e167972083406070aca733ac2a2795 + languageName: node + linkType: hard + "@jest/environment@npm:^29.7.0": version: 29.7.0 resolution: "@jest/environment@npm:29.7.0" @@ -1721,6 +1751,160 @@ __metadata: languageName: node linkType: hard +"@swc/core-darwin-arm64@npm:1.9.2": + version: 1.9.2 + resolution: "@swc/core-darwin-arm64@npm:1.9.2" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@swc/core-darwin-x64@npm:1.9.2": + version: 1.9.2 + resolution: "@swc/core-darwin-x64@npm:1.9.2" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@swc/core-linux-arm-gnueabihf@npm:1.9.2": + version: 1.9.2 + resolution: 
"@swc/core-linux-arm-gnueabihf@npm:1.9.2" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@swc/core-linux-arm64-gnu@npm:1.9.2": + version: 1.9.2 + resolution: "@swc/core-linux-arm64-gnu@npm:1.9.2" + conditions: os=linux & cpu=arm64 & libc=glibc + languageName: node + linkType: hard + +"@swc/core-linux-arm64-musl@npm:1.9.2": + version: 1.9.2 + resolution: "@swc/core-linux-arm64-musl@npm:1.9.2" + conditions: os=linux & cpu=arm64 & libc=musl + languageName: node + linkType: hard + +"@swc/core-linux-x64-gnu@npm:1.9.2": + version: 1.9.2 + resolution: "@swc/core-linux-x64-gnu@npm:1.9.2" + conditions: os=linux & cpu=x64 & libc=glibc + languageName: node + linkType: hard + +"@swc/core-linux-x64-musl@npm:1.9.2": + version: 1.9.2 + resolution: "@swc/core-linux-x64-musl@npm:1.9.2" + conditions: os=linux & cpu=x64 & libc=musl + languageName: node + linkType: hard + +"@swc/core-win32-arm64-msvc@npm:1.9.2": + version: 1.9.2 + resolution: "@swc/core-win32-arm64-msvc@npm:1.9.2" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@swc/core-win32-ia32-msvc@npm:1.9.2": + version: 1.9.2 + resolution: "@swc/core-win32-ia32-msvc@npm:1.9.2" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@swc/core-win32-x64-msvc@npm:1.9.2": + version: 1.9.2 + resolution: "@swc/core-win32-x64-msvc@npm:1.9.2" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@swc/core@npm:^1.9.2": + version: 1.9.2 + resolution: "@swc/core@npm:1.9.2" + dependencies: + "@swc/core-darwin-arm64": "npm:1.9.2" + "@swc/core-darwin-x64": "npm:1.9.2" + "@swc/core-linux-arm-gnueabihf": "npm:1.9.2" + "@swc/core-linux-arm64-gnu": "npm:1.9.2" + "@swc/core-linux-arm64-musl": "npm:1.9.2" + "@swc/core-linux-x64-gnu": "npm:1.9.2" + "@swc/core-linux-x64-musl": "npm:1.9.2" + "@swc/core-win32-arm64-msvc": "npm:1.9.2" + "@swc/core-win32-ia32-msvc": "npm:1.9.2" + "@swc/core-win32-x64-msvc": "npm:1.9.2" + "@swc/counter": "npm:^0.1.3" + "@swc/types": "npm:^0.1.15" + peerDependencies: + "@swc/helpers": "*" + dependenciesMeta: + "@swc/core-darwin-arm64": + optional: true + "@swc/core-darwin-x64": + optional: true + "@swc/core-linux-arm-gnueabihf": + optional: true + "@swc/core-linux-arm64-gnu": + optional: true + "@swc/core-linux-arm64-musl": + optional: true + "@swc/core-linux-x64-gnu": + optional: true + "@swc/core-linux-x64-musl": + optional: true + "@swc/core-win32-arm64-msvc": + optional: true + "@swc/core-win32-ia32-msvc": + optional: true + "@swc/core-win32-x64-msvc": + optional: true + peerDependenciesMeta: + "@swc/helpers": + optional: true + checksum: 10c0/697e601fa1246367ca67e87e87c45f6341373ae98d8d24c9586c4069660c73f8675bf94b86cf218308395eda8e355ae076fc8c9c8f7aaa50898c228db38b637d + languageName: node + linkType: hard + +"@swc/counter@npm:^0.1.3": + version: 0.1.3 + resolution: "@swc/counter@npm:0.1.3" + checksum: 10c0/8424f60f6bf8694cfd2a9bca45845bce29f26105cda8cf19cdb9fd3e78dc6338699e4db77a89ae449260bafa1cc6bec307e81e7fb96dbf7dcfce0eea55151356 + languageName: node + linkType: hard + +"@swc/jest@npm:^0.2.37": + version: 0.2.37 + resolution: "@swc/jest@npm:0.2.37" + dependencies: + "@jest/create-cache-key-function": "npm:^29.7.0" + "@swc/counter": "npm:^0.1.3" + jsonc-parser: "npm:^3.2.0" + peerDependencies: + "@swc/core": "*" + checksum: 10c0/abe10d87610bf7c172aa7ab14c64599a22e48c1f43a09d6e22733f85f25fb98e57cb4bb58b9554e60a3ac8629be559bd967d7a8601a3ceaacad618aecccebec2 + languageName: node + linkType: hard + +"@swc/types@npm:^0.1.15": + version: 0.1.15 + 
resolution: "@swc/types@npm:0.1.15" + dependencies: + "@swc/counter": "npm:^0.1.3" + checksum: 10c0/82bcfa64e53c6c93ae162fe9e491e5f300227fad6f110e32d9718e5a0e29586bc79c516234f6eccbe5ccd7ed72b514a21f03196a54408cf1b7b47c072fad44f0 + languageName: node + linkType: hard + +"@testcontainers/postgresql@npm:^10.14.0": + version: 10.14.0 + resolution: "@testcontainers/postgresql@npm:10.14.0" + dependencies: + testcontainers: "npm:^10.14.0" + checksum: 10c0/cc93de820fe0a9ce9b72c4c35048e5fb79e6dc151ab57530ce0686b9dfe7d67710b297163304c54daccfae73b58364c8f9345a5b9b252443e7b7c44bb7223305 + languageName: node + linkType: hard + "@tsconfig/node10@npm:^1.0.7": version: 1.0.11 resolution: "@tsconfig/node10@npm:1.0.11" @@ -1836,6 +2020,27 @@ __metadata: languageName: node linkType: hard +"@types/docker-modem@npm:*": + version: 3.0.6 + resolution: "@types/docker-modem@npm:3.0.6" + dependencies: + "@types/node": "npm:*" + "@types/ssh2": "npm:*" + checksum: 10c0/d3ffd273148bc883ff9b1a972b1f84c1add6d9a197d2f4fc9774db4c814f39c2e51cc649385b55d781c790c16fb0bf9c1f4c62499bd0f372a4b920190919445d + languageName: node + linkType: hard + +"@types/dockerode@npm:^3.3.29": + version: 3.3.32 + resolution: "@types/dockerode@npm:3.3.32" + dependencies: + "@types/docker-modem": "npm:*" + "@types/node": "npm:*" + "@types/ssh2": "npm:*" + checksum: 10c0/89b45da46ffd0eee36b469a6e61ddcbf101835ee2212ac26b84ee01556c5a99155469e5e345eadb677fd3dd80e7f238d3f265c15b3f99f94f4c53cad9b35023c + languageName: node + linkType: hard + "@types/express-serve-static-core@npm:^4.17.33": version: 4.19.6 resolution: "@types/express-serve-static-core@npm:4.19.6" @@ -1993,6 +2198,15 @@ __metadata: languageName: node linkType: hard +"@types/node@npm:^18.11.18": + version: 18.19.64 + resolution: "@types/node@npm:18.19.64" + dependencies: + undici-types: "npm:~5.26.4" + checksum: 10c0/a54009cd222f5751c903e5f4889a0f12e3d3755a1f87ce919455eeaf00a9ba0c9215c4a92bc3d8df585a894fa3e4cf218e5afccced355688624133e1a4b88235 + languageName: node + linkType: hard + "@types/pg-format@npm:^1": version: 1.0.5 resolution: "@types/pg-format@npm:1.0.5" @@ -2046,6 +2260,34 @@ __metadata: languageName: node linkType: hard +"@types/ssh2-streams@npm:*": + version: 0.1.12 + resolution: "@types/ssh2-streams@npm:0.1.12" + dependencies: + "@types/node": "npm:*" + checksum: 10c0/6c860066e76391c937723b9f8c3953208737be5adf33b5584d7817ec90913094f2ca578e1d47717182f1d62cb5ca8e83fdec0241d73bf064221e3a2b2d132f0e + languageName: node + linkType: hard + +"@types/ssh2@npm:*": + version: 1.15.1 + resolution: "@types/ssh2@npm:1.15.1" + dependencies: + "@types/node": "npm:^18.11.18" + checksum: 10c0/83c83684e0d620ab940e05c5b7e846eacf6c56761e421dbe6a5a51daa09c82fb71ea4843b792b6e6b2edd0bee8eb665034ffd73978d936b9f008c553bcc38ea7 + languageName: node + linkType: hard + +"@types/ssh2@npm:^0.5.48": + version: 0.5.52 + resolution: "@types/ssh2@npm:0.5.52" + dependencies: + "@types/node": "npm:*" + "@types/ssh2-streams": "npm:*" + checksum: 10c0/95c52fd3438dedae6a59ca87b6558cb36568db6b9144c6c8a28c168739e04c51e27c02908aae14950b7b5020e1c40fea039b1203ae2734c356a40a050fd51c84 + languageName: node + linkType: hard + "@types/stack-utils@npm:^2.0.0": version: 2.0.3 resolution: "@types/stack-utils@npm:2.0.3" @@ -2241,6 +2483,15 @@ __metadata: languageName: node linkType: hard +"abort-controller@npm:^3.0.0": + version: 3.0.0 + resolution: "abort-controller@npm:3.0.0" + dependencies: + event-target-shim: "npm:^5.0.0" + checksum: 
10c0/90ccc50f010250152509a344eb2e71977fbf8db0ab8f1061197e3275ddf6c61a41a6edfd7b9409c664513131dd96e962065415325ef23efa5db931b382d24ca5 + languageName: node + linkType: hard + "accepts@npm:~1.3.8": version: 1.3.8 resolution: "accepts@npm:1.3.8" @@ -2372,6 +2623,36 @@ __metadata: languageName: node linkType: hard +"archiver-utils@npm:^5.0.0, archiver-utils@npm:^5.0.2": + version: 5.0.2 + resolution: "archiver-utils@npm:5.0.2" + dependencies: + glob: "npm:^10.0.0" + graceful-fs: "npm:^4.2.0" + is-stream: "npm:^2.0.1" + lazystream: "npm:^1.0.0" + lodash: "npm:^4.17.15" + normalize-path: "npm:^3.0.0" + readable-stream: "npm:^4.0.0" + checksum: 10c0/3782c5fa9922186aa1a8e41ed0c2867569faa5f15c8e5e6418ea4c1b730b476e21bd68270b3ea457daf459ae23aaea070b2b9f90cf90a59def8dc79b9e4ef538 + languageName: node + linkType: hard + +"archiver@npm:^7.0.1": + version: 7.0.1 + resolution: "archiver@npm:7.0.1" + dependencies: + archiver-utils: "npm:^5.0.2" + async: "npm:^3.2.4" + buffer-crc32: "npm:^1.0.0" + readable-stream: "npm:^4.0.0" + readdir-glob: "npm:^1.1.2" + tar-stream: "npm:^3.0.0" + zip-stream: "npm:^6.0.1" + checksum: 10c0/02afd87ca16f6184f752db8e26884e6eff911c476812a0e7f7b26c4beb09f06119807f388a8e26ed2558aa8ba9db28646ebd147a4f99e46813b8b43158e1438e + languageName: node + linkType: hard + "arg@npm:^4.1.0": version: 4.1.3 resolution: "arg@npm:4.1.3" @@ -2402,7 +2683,7 @@ __metadata: languageName: node linkType: hard -"asn1@npm:^0.2.4": +"asn1@npm:^0.2.4, asn1@npm:^0.2.6": version: 0.2.6 resolution: "asn1@npm:0.2.6" dependencies: @@ -2422,6 +2703,13 @@ __metadata: languageName: node linkType: hard +"async-lock@npm:^1.4.1": + version: 1.4.1 + resolution: "async-lock@npm:1.4.1" + checksum: 10c0/f696991c7d894af1dc91abc81cc4f14b3785190a35afb1646d8ab91138238d55cabd83bfdd56c42663a008d72b3dc39493ff83797e550effc577d1ccbde254af + languageName: node + linkType: hard + "async@npm:3.2.3": version: 3.2.3 resolution: "async@npm:3.2.3" @@ -2438,13 +2726,20 @@ __metadata: languageName: node linkType: hard -"async@npm:^3.2.3": +"async@npm:^3.2.3, async@npm:^3.2.4": version: 3.2.6 resolution: "async@npm:3.2.6" checksum: 10c0/36484bb15ceddf07078688d95e27076379cc2f87b10c03b6dd8a83e89475a3c8df5848859dd06a4c95af1e4c16fc973de0171a77f18ea00be899aca2a4f85e70 languageName: node linkType: hard +"b4a@npm:^1.6.4": + version: 1.6.7 + resolution: "b4a@npm:1.6.7" + checksum: 10c0/ec2f004d1daae04be8c5a1f8aeb7fea213c34025e279db4958eb0b82c1729ee25f7c6e89f92a5f65c8a9cf2d017ce27e3dda912403341d1781bd74528a4849d4 + languageName: node + linkType: hard + "babel-jest@npm:^29.7.0": version: 29.7.0 resolution: "babel-jest@npm:29.7.0" @@ -2531,6 +2826,49 @@ __metadata: languageName: node linkType: hard +"bare-events@npm:^2.0.0, bare-events@npm:^2.2.0": + version: 2.5.0 + resolution: "bare-events@npm:2.5.0" + checksum: 10c0/afbeec4e8be4d93fb4a3be65c3b4a891a2205aae30b5a38fafd42976cc76cf30dad348963fe330a0d70186e15dc507c11af42c89af5dddab2a54e5aff02e2896 + languageName: node + linkType: hard + +"bare-fs@npm:^2.1.1": + version: 2.3.5 + resolution: "bare-fs@npm:2.3.5" + dependencies: + bare-events: "npm:^2.0.0" + bare-path: "npm:^2.0.0" + bare-stream: "npm:^2.0.0" + checksum: 10c0/ff18cc9be7c557c38e0342681ba3672ae4b01e5696b567d4035e5995255dc6bc7d4df88ed210fa4d3eb940eb29512e924ebb42814c87fc59a2bee8cf83b7c2f9 + languageName: node + linkType: hard + +"bare-os@npm:^2.1.0": + version: 2.4.4 + resolution: "bare-os@npm:2.4.4" + checksum: 
10c0/e7d1a7b2100c05da8d25b60d0d48cf850c6f57064577a3f2f51cf18d417fbcfd6967ed2d8314320914ed69e0f2ebcf54eb1b36092dd172d8e8f969cf8cccf041 + languageName: node + linkType: hard + +"bare-path@npm:^2.0.0, bare-path@npm:^2.1.0": + version: 2.1.3 + resolution: "bare-path@npm:2.1.3" + dependencies: + bare-os: "npm:^2.1.0" + checksum: 10c0/35587e177fc8fa5b13fb90bac8779b5ce49c99016d221ddaefe2232d02bd4295d79b941e14ae19fda75ec42a6fe5fb66c07d83ae7ec11462178e66b7be65ca74 + languageName: node + linkType: hard + +"bare-stream@npm:^2.0.0": + version: 2.3.2 + resolution: "bare-stream@npm:2.3.2" + dependencies: + streamx: "npm:^2.20.0" + checksum: 10c0/e2bda606c2cbd6acbb2558d9a5f6d2d4bc08fb635d32d599bc8e74c1d2298c956decf6a3a820e485a760bb73b8a7f0e743ec5262f08cccbaf5eeb599253d4221 + languageName: node + linkType: hard + "base64-js@npm:^1.3.1": version: 1.5.1 resolution: "base64-js@npm:1.5.1" @@ -2547,6 +2885,17 @@ __metadata: languageName: node linkType: hard +"bl@npm:^4.0.3": + version: 4.1.0 + resolution: "bl@npm:4.1.0" + dependencies: + buffer: "npm:^5.5.0" + inherits: "npm:^2.0.4" + readable-stream: "npm:^3.4.0" + checksum: 10c0/02847e1d2cb089c9dc6958add42e3cdeaf07d13f575973963335ac0fdece563a50ac770ac4c8fa06492d2dd276f6cc3b7f08c7cd9c7a7ad0f8d388b2a28def5f + languageName: node + linkType: hard + "blockstore-core@npm:^5.0.2": version: 5.0.2 resolution: "blockstore-core@npm:5.0.2" @@ -2657,6 +3006,13 @@ __metadata: languageName: node linkType: hard +"buffer-crc32@npm:^1.0.0": + version: 1.0.0 + resolution: "buffer-crc32@npm:1.0.0" + checksum: 10c0/8b86e161cee4bb48d5fa622cbae4c18f25e4857e5203b89e23de59e627ab26beb82d9d7999f2b8de02580165f61f83f997beaf02980cdf06affd175b651921ab + languageName: node + linkType: hard + "buffer-from@npm:^1.0.0": version: 1.1.2 resolution: "buffer-from@npm:1.1.2" @@ -2664,6 +3020,16 @@ __metadata: languageName: node linkType: hard +"buffer@npm:^5.5.0": + version: 5.7.1 + resolution: "buffer@npm:5.7.1" + dependencies: + base64-js: "npm:^1.3.1" + ieee754: "npm:^1.1.13" + checksum: 10c0/27cac81cff434ed2876058d72e7c4789d11ff1120ef32c9de48f59eab58179b66710c488987d295ae89a228f835fc66d088652dffeb8e3ba8659f80eb091d55e + languageName: node + linkType: hard + "buffer@npm:^6.0.3": version: 6.0.3 resolution: "buffer@npm:6.0.3" @@ -2684,6 +3050,13 @@ __metadata: languageName: node linkType: hard +"buildcheck@npm:~0.0.6": + version: 0.0.6 + resolution: "buildcheck@npm:0.0.6" + checksum: 10c0/8cbdb89f41bc484b8325f4828db4135b206a0dffb641eb6cdb2b7022483c45dd0e5aac6d820c9a67bdd2caab3a02c76d7ceec7bd9ec494b5a2270d2806b01a76 + languageName: node + linkType: hard + "busboy@npm:^1.0.0": version: 1.6.0 resolution: "busboy@npm:1.6.0" @@ -2693,6 +3066,13 @@ __metadata: languageName: node linkType: hard +"byline@npm:^5.0.0": + version: 5.0.0 + resolution: "byline@npm:5.0.0" + checksum: 10c0/33fb64cd84440b3652a99a68d732c56ef18a748ded495ba38e7756a242fab0d4654b9b8ce269fd0ac14c5f97aa4e3c369613672b280a1f60b559b34223105c85 + languageName: node + linkType: hard + "bytes@npm:3.1.2": version: 3.1.2 resolution: "bytes@npm:3.1.2" @@ -2787,6 +3167,13 @@ __metadata: languageName: node linkType: hard +"chownr@npm:^1.1.1": + version: 1.1.4 + resolution: "chownr@npm:1.1.4" + checksum: 10c0/ed57952a84cc0c802af900cf7136de643d3aba2eecb59d29344bc2f3f9bf703a301b9d84cdc71f82c3ffc9ccde831b0d92f5b45f91727d6c9da62f23aef9d9db + languageName: node + linkType: hard + "chownr@npm:^2.0.0": version: 2.0.0 resolution: "chownr@npm:2.0.0" @@ -2874,6 +3261,19 @@ __metadata: languageName: node linkType: hard +"compress-commons@npm:^6.0.2": + 
version: 6.0.2 + resolution: "compress-commons@npm:6.0.2" + dependencies: + crc-32: "npm:^1.2.0" + crc32-stream: "npm:^6.0.0" + is-stream: "npm:^2.0.1" + normalize-path: "npm:^3.0.0" + readable-stream: "npm:^4.0.0" + checksum: 10c0/2347031b7c92c8ed5011b07b93ec53b298fa2cd1800897532ac4d4d1aeae06567883f481b6e35f13b65fc31b190c751df6635434d525562f0203fde76f1f0814 + languageName: node + linkType: hard + "concat-map@npm:0.0.1": version: 0.0.1 resolution: "concat-map@npm:0.0.1" @@ -2957,6 +3357,36 @@ __metadata: languageName: node linkType: hard +"cpu-features@npm:~0.0.10": + version: 0.0.10 + resolution: "cpu-features@npm:0.0.10" + dependencies: + buildcheck: "npm:~0.0.6" + nan: "npm:^2.19.0" + node-gyp: "npm:latest" + checksum: 10c0/0c4a12904657b22477ffbcfd2b4b2bdd45b174f283616b18d9e1ade495083f9f6098493feb09f4ae2d0b36b240f9ecd32cfb4afe210cf0d0f8f0cc257bd58e54 + languageName: node + linkType: hard + +"crc-32@npm:^1.2.0": + version: 1.2.2 + resolution: "crc-32@npm:1.2.2" + bin: + crc32: bin/crc32.njs + checksum: 10c0/11dcf4a2e77ee793835d49f2c028838eae58b44f50d1ff08394a610bfd817523f105d6ae4d9b5bef0aad45510f633eb23c903e9902e4409bed1ce70cb82b9bf0 + languageName: node + linkType: hard + +"crc32-stream@npm:^6.0.0": + version: 6.0.0 + resolution: "crc32-stream@npm:6.0.0" + dependencies: + crc-32: "npm:^1.2.0" + readable-stream: "npm:^4.0.0" + checksum: 10c0/bf9c84571ede2d119c2b4f3a9ef5eeb9ff94b588493c0d3862259af86d3679dcce1c8569dd2b0a6eff2f35f5e2081cc1263b846d2538d4054da78cf34f262a3d + languageName: node + linkType: hard + "create-jest@npm:^29.7.0": version: 29.7.0 resolution: "create-jest@npm:29.7.0" @@ -3015,9 +3445,13 @@ __metadata: dependencies: "@autonomys/auto-dag-data": "npm:^1.0.8" "@autonomys/auto-drive": "npm:^1.0.5" + "@faker-js/faker": "npm:^9.2.0" "@polkadot/api": "npm:^12.3.1" "@polkadot/types": "npm:^13.0.1" "@polkadot/util-crypto": "npm:^13.0.2" + "@swc/core": "npm:^1.9.2" + "@swc/jest": "npm:^0.2.37" + "@testcontainers/postgresql": "npm:^10.14.0" "@types/cors": "npm:^2.8.17" "@types/express": "npm:^4.17.21" "@types/jest": "npm:^29.5.12" @@ -3035,6 +3469,7 @@ __metadata: cors: "npm:^2.8.5" db-migrate: "npm:^0.11.14" db-migrate-pg: "npm:^1.5.2" + db-migrate-plugin-typescript: "npm:^2.0.0" dotenv: "npm:^16.4.5" eslint: "npm:^8.57.1" eslint-config-prettier: "npm:^9.1.0" @@ -3050,7 +3485,7 @@ __metadata: pg-format: "npm:^1.0.4" pizzip: "npm:^3.1.7" prettier: "npm:^3.3.3" - ts-jest: "npm:^29.2.4" + ts-jest: "npm:^29.2.5" ts-node: "npm:^10.9.2" typescript: "npm:^5.6.3" uuid: "npm:^10.0.0" @@ -3087,6 +3522,15 @@ __metadata: languageName: node linkType: hard +"db-migrate-plugin-typescript@npm:^2.0.0": + version: 2.0.0 + resolution: "db-migrate-plugin-typescript@npm:2.0.0" + peerDependencies: + ts-node: ^3.3.0 + checksum: 10c0/6418fb1b58647c3ea7b57a1df10acf2c9901c0b456acc1a5f0efd009492e536a71402dfe6613e81ca4c6a5bd5f989e2621d5d44b9bec039eae1e0f029041877d + languageName: node + linkType: hard + "db-migrate-shared@npm:^1.2.0": version: 1.2.0 resolution: "db-migrate-shared@npm:1.2.0" @@ -3128,7 +3572,7 @@ __metadata: languageName: node linkType: hard -"debug@npm:4, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.4": +"debug@npm:4, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.4, debug@npm:^4.3.5": version: 4.3.7 resolution: "debug@npm:4.3.7" dependencies: @@ -3235,6 +3679,38 @@ __metadata: languageName: node linkType: hard +"docker-compose@npm:^0.24.8": + version: 0.24.8 + resolution: "docker-compose@npm:0.24.8" + 
dependencies: + yaml: "npm:^2.2.2" + checksum: 10c0/1494389e554fed8aabf9fef24210a641cd2442028b1462d7f68186919f5e75045f7bfb4ccaf47c94ed879dcb63e4d82885c389399f531550c4b244920740b2b3 + languageName: node + linkType: hard + +"docker-modem@npm:^3.0.0": + version: 3.0.8 + resolution: "docker-modem@npm:3.0.8" + dependencies: + debug: "npm:^4.1.1" + readable-stream: "npm:^3.5.0" + split-ca: "npm:^1.0.1" + ssh2: "npm:^1.11.0" + checksum: 10c0/5c00592297fabd78454621fe765a5ef0daea4bbb6692e239ad65b111f4da9d750178f448f8efcaf84f9f999598eb735bc14ad6bf5f0a2dcf9c2d453d5b683540 + languageName: node + linkType: hard + +"dockerode@npm:^3.3.5": + version: 3.3.5 + resolution: "dockerode@npm:3.3.5" + dependencies: + "@balena/dockerignore": "npm:^1.0.2" + docker-modem: "npm:^3.0.0" + tar-fs: "npm:~2.0.1" + checksum: 10c0/c45fa8ed3ad76f13fe7799d539a60fe466f8e34bea06b30d75be9e08bc00536cc9ff2d54e38fbb3b2a8a382bf9d4459a27741e6454ce7d0cda5cd35c51224c73 + languageName: node + linkType: hard + "doctrine@npm:^3.0.0": version: 3.0.0 resolution: "doctrine@npm:3.0.0" @@ -3334,6 +3810,15 @@ __metadata: languageName: node linkType: hard +"end-of-stream@npm:^1.1.0, end-of-stream@npm:^1.4.1": + version: 1.4.4 + resolution: "end-of-stream@npm:1.4.4" + dependencies: + once: "npm:^1.4.0" + checksum: 10c0/870b423afb2d54bb8d243c63e07c170409d41e20b47eeef0727547aea5740bd6717aca45597a9f2745525667a6b804c1e7bede41f856818faee5806dd9ff3975 + languageName: node + linkType: hard + "entities@npm:^4.4.0": version: 4.5.0 resolution: "entities@npm:4.5.0" @@ -3657,6 +4142,13 @@ __metadata: languageName: node linkType: hard +"event-target-shim@npm:^5.0.0": + version: 5.0.1 + resolution: "event-target-shim@npm:5.0.1" + checksum: 10c0/0255d9f936215fd206156fd4caa9e8d35e62075d720dc7d847e89b417e5e62cf1ce6c9b4e0a1633a9256de0efefaf9f8d26924b1f3c8620cffb9db78e7d3076b + languageName: node + linkType: hard + "eventemitter3@npm:^5.0.1": version: 5.0.1 resolution: "eventemitter3@npm:5.0.1" @@ -3664,6 +4156,13 @@ __metadata: languageName: node linkType: hard +"events@npm:^3.3.0": + version: 3.3.0 + resolution: "events@npm:3.3.0" + checksum: 10c0/d6b6f2adbccbcda74ddbab52ed07db727ef52e31a61ed26db9feb7dc62af7fc8e060defa65e5f8af9449b86b52cc1a1f6a79f2eafcf4e62add2b7a1fa4a432f6 + languageName: node + linkType: hard + "execa@npm:^5.0.0": version: 5.1.1 resolution: "execa@npm:5.1.1" @@ -3777,6 +4276,13 @@ __metadata: languageName: node linkType: hard +"fast-fifo@npm:^1.2.0, fast-fifo@npm:^1.3.2": + version: 1.3.2 + resolution: "fast-fifo@npm:1.3.2" + checksum: 10c0/d53f6f786875e8b0529f784b59b4b05d4b5c31c651710496440006a398389a579c8dbcd2081311478b5bf77f4b0b21de69109c5a4eabea9d8e8783d1eb864e4c + languageName: node + linkType: hard + "fast-glob@npm:^3.3.2": version: 3.3.2 resolution: "fast-glob@npm:3.3.2" @@ -3962,6 +4468,13 @@ __metadata: languageName: node linkType: hard +"fs-constants@npm:^1.0.0": + version: 1.0.0 + resolution: "fs-constants@npm:1.0.0" + checksum: 10c0/a0cde99085f0872f4d244e83e03a46aa387b74f5a5af750896c6b05e9077fac00e9932fdf5aef84f2f16634cd473c63037d7a512576da7d5c2b9163d1909f3a8 + languageName: node + linkType: hard + "fs-minipass@npm:^2.0.0": version: 2.1.0 resolution: "fs-minipass@npm:2.1.0" @@ -4047,6 +4560,13 @@ __metadata: languageName: node linkType: hard +"get-port@npm:^5.1.1": + version: 5.1.1 + resolution: "get-port@npm:5.1.1" + checksum: 10c0/2873877a469b24e6d5e0be490724a17edb39fafc795d1d662e7bea951ca649713b4a50117a473f9d162312cb0e946597bd0e049ed2f866e79e576e8e213d3d1c + languageName: node + linkType: hard + "get-stream@npm:^6.0.0": 
version: 6.0.1 resolution: "get-stream@npm:6.0.1" @@ -4072,7 +4592,7 @@ __metadata: languageName: node linkType: hard -"glob@npm:^10.2.2, glob@npm:^10.3.10": +"glob@npm:^10.0.0, glob@npm:^10.2.2, glob@npm:^10.3.10": version: 10.4.5 resolution: "glob@npm:10.4.5" dependencies: @@ -4140,7 +4660,7 @@ __metadata: languageName: node linkType: hard -"graceful-fs@npm:^4.1.9, graceful-fs@npm:^4.2.6, graceful-fs@npm:^4.2.9": +"graceful-fs@npm:^4.1.9, graceful-fs@npm:^4.2.0, graceful-fs@npm:^4.2.4, graceful-fs@npm:^4.2.6, graceful-fs@npm:^4.2.9": version: 4.2.11 resolution: "graceful-fs@npm:4.2.11" checksum: 10c0/386d011a553e02bc594ac2ca0bd6d9e4c22d7fa8cfbfc448a6d148c59ea881b092db9dbe3547ae4b88e55f1b01f7c4a2ecc53b310c042793e63aa44cf6c257f2 @@ -4272,7 +4792,7 @@ __metadata: languageName: node linkType: hard -"ieee754@npm:^1.2.1": +"ieee754@npm:^1.1.13, ieee754@npm:^1.2.1": version: 1.2.1 resolution: "ieee754@npm:1.2.1" checksum: 10c0/b0782ef5e0935b9f12883a2e2aa37baa75da6e66ce6515c168697b42160807d9330de9a32ec1ed73149aea02e0d822e572bca6f1e22bdcbd2149e13b050b17bb @@ -4346,7 +4866,7 @@ __metadata: languageName: node linkType: hard -"inherits@npm:2, inherits@npm:2.0.4, inherits@npm:^2.0.3, inherits@npm:~2.0.3": +"inherits@npm:2, inherits@npm:2.0.4, inherits@npm:^2.0.3, inherits@npm:^2.0.4, inherits@npm:~2.0.3": version: 2.0.4 resolution: "inherits@npm:2.0.4" checksum: 10c0/4e531f648b29039fb7426fb94075e6545faa1eb9fe83c29f0b6d9e7263aceb4289d2d4557db0d428188eeb449cc7c5e77b0a0b2c4e248ff2a65933a0dee49ef2 @@ -4471,7 +4991,7 @@ __metadata: languageName: node linkType: hard -"is-stream@npm:^2.0.0": +"is-stream@npm:^2.0.0, is-stream@npm:^2.0.1": version: 2.0.1 resolution: "is-stream@npm:2.0.1" checksum: 10c0/7c284241313fc6efc329b8d7f08e16c0efeb6baab1b4cd0ba579eb78e5af1aa5da11e68559896a2067cd6c526bd29241dda4eb1225e627d5aa1a89a76d4635a5 @@ -5216,6 +5736,13 @@ __metadata: languageName: node linkType: hard +"jsonc-parser@npm:^3.2.0": + version: 3.3.1 + resolution: "jsonc-parser@npm:3.3.1" + checksum: 10c0/269c3ae0a0e4f907a914bf334306c384aabb9929bd8c99f909275ebd5c2d3bc70b9bcd119ad794f339dec9f24b6a4ee9cd5a8ab2e6435e730ad4075388fc2ab6 + languageName: node + linkType: hard + "jszip@npm:^3.10.1": version: 3.10.1 resolution: "jszip@npm:3.10.1" @@ -5253,6 +5780,15 @@ __metadata: languageName: node linkType: hard +"lazystream@npm:^1.0.0": + version: 1.0.1 + resolution: "lazystream@npm:1.0.1" + dependencies: + readable-stream: "npm:^2.0.5" + checksum: 10c0/ea4e509a5226ecfcc303ba6782cc269be8867d372b9bcbd625c88955df1987ea1a20da4643bf9270336415a398d33531ebf0d5f0d393b9283dc7c98bfcbd7b69 + languageName: node + linkType: hard + "leven@npm:^3.1.0": version: 3.1.0 resolution: "leven@npm:3.1.0" @@ -5561,7 +6097,7 @@ __metadata: languageName: node linkType: hard -"minimatch@npm:^5.0.1": +"minimatch@npm:^5.0.1, minimatch@npm:^5.1.0": version: 5.1.6 resolution: "minimatch@npm:5.1.6" dependencies: @@ -5670,6 +6206,13 @@ __metadata: languageName: node linkType: hard +"mkdirp-classic@npm:^0.5.2": + version: 0.5.3 + resolution: "mkdirp-classic@npm:0.5.3" + checksum: 10c0/95371d831d196960ddc3833cc6907e6b8f67ac5501a6582f47dfae5eb0f092e9f8ce88e0d83afcae95d6e2b61a01741ba03714eeafb6f7a6e9dcc158ac85b168 + languageName: node + linkType: hard + "mkdirp@npm:^0.5.4, mkdirp@npm:~0.5.0": version: 0.5.6 resolution: "mkdirp@npm:0.5.6" @@ -5754,7 +6297,7 @@ __metadata: languageName: node linkType: hard -"nan@npm:^2.14.1, nan@npm:^2.15.0": +"nan@npm:^2.14.1, nan@npm:^2.15.0, nan@npm:^2.19.0, nan@npm:^2.20.0": version: 2.22.0 resolution: "nan@npm:2.22.0" 
dependencies: @@ -5930,7 +6473,7 @@ __metadata: languageName: node linkType: hard -"once@npm:^1.3.0": +"once@npm:^1.3.0, once@npm:^1.3.1, once@npm:^1.4.0": version: 1.4.0 resolution: "once@npm:1.4.0" dependencies: @@ -6428,6 +6971,13 @@ __metadata: languageName: node linkType: hard +"process@npm:^0.11.10": + version: 0.11.10 + resolution: "process@npm:0.11.10" + checksum: 10c0/40c3ce4b7e6d4b8c3355479df77aeed46f81b279818ccdc500124e6a5ab882c0cc81ff7ea16384873a95a74c4570b01b120f287abbdd4c877931460eca6084b3 + languageName: node + linkType: hard + "progress-events@npm:^1.0.0": version: 1.0.1 resolution: "progress-events@npm:1.0.1" @@ -6475,6 +7025,26 @@ __metadata: languageName: node linkType: hard +"proper-lockfile@npm:^4.1.2": + version: 4.1.2 + resolution: "proper-lockfile@npm:4.1.2" + dependencies: + graceful-fs: "npm:^4.2.4" + retry: "npm:^0.12.0" + signal-exit: "npm:^3.0.2" + checksum: 10c0/2f265dbad15897a43110a02dae55105c04d356ec4ed560723dcb9f0d34bc4fb2f13f79bb930e7561be10278e2314db5aca2527d5d3dcbbdee5e6b331d1571f6d + languageName: node + linkType: hard + +"properties-reader@npm:^2.3.0": + version: 2.3.0 + resolution: "properties-reader@npm:2.3.0" + dependencies: + mkdirp: "npm:^1.0.4" + checksum: 10c0/f665057e3a9076c643ba1198afcc71703eda227a59913252f7ff9467ece8d29c0cf8bf14bf1abcaef71570840c32a4e257e6c39b7550451bbff1a777efcf5667 + languageName: node + linkType: hard + "protobufjs-cli@npm:^1.0.0": version: 1.1.3 resolution: "protobufjs-cli@npm:1.1.3" @@ -6552,6 +7122,16 @@ __metadata: languageName: node linkType: hard +"pump@npm:^3.0.0": + version: 3.0.2 + resolution: "pump@npm:3.0.2" + dependencies: + end-of-stream: "npm:^1.1.0" + once: "npm:^1.3.1" + checksum: 10c0/5ad655cb2a7738b4bcf6406b24ad0970d680649d996b55ad20d1be8e0c02394034e4c45ff7cd105d87f1e9b96a0e3d06fd28e11fae8875da26e7f7a8e2c9726f + languageName: node + linkType: hard + "punycode.js@npm:^2.3.1": version: 2.3.1 resolution: "punycode.js@npm:2.3.1" @@ -6605,6 +7185,13 @@ __metadata: languageName: node linkType: hard +"queue-tick@npm:^1.0.1": + version: 1.0.1 + resolution: "queue-tick@npm:1.0.1" + checksum: 10c0/0db998e2c9b15215317dbcf801e9b23e6bcde4044e115155dae34f8e7454b9a783f737c9a725528d677b7a66c775eb7a955cf144fe0b87f62b575ce5bfd515a9 + languageName: node + linkType: hard + "range-parser@npm:~1.2.1": version: 1.2.1 resolution: "range-parser@npm:1.2.1" @@ -6654,7 +7241,7 @@ __metadata: languageName: node linkType: hard -"readable-stream@npm:^2.2.2, readable-stream@npm:~2.3.6": +"readable-stream@npm:^2.0.5, readable-stream@npm:^2.2.2, readable-stream@npm:~2.3.6": version: 2.3.8 resolution: "readable-stream@npm:2.3.8" dependencies: @@ -6669,6 +7256,39 @@ __metadata: languageName: node linkType: hard +"readable-stream@npm:^3.1.1, readable-stream@npm:^3.4.0, readable-stream@npm:^3.5.0": + version: 3.6.2 + resolution: "readable-stream@npm:3.6.2" + dependencies: + inherits: "npm:^2.0.3" + string_decoder: "npm:^1.1.1" + util-deprecate: "npm:^1.0.1" + checksum: 10c0/e37be5c79c376fdd088a45fa31ea2e423e5d48854be7a22a58869b4e84d25047b193f6acb54f1012331e1bcd667ffb569c01b99d36b0bd59658fb33f513511b7 + languageName: node + linkType: hard + +"readable-stream@npm:^4.0.0": + version: 4.5.2 + resolution: "readable-stream@npm:4.5.2" + dependencies: + abort-controller: "npm:^3.0.0" + buffer: "npm:^6.0.3" + events: "npm:^3.3.0" + process: "npm:^0.11.10" + string_decoder: "npm:^1.3.0" + checksum: 10c0/a2c80e0e53aabd91d7df0330929e32d0a73219f9477dbbb18472f6fdd6a11a699fc5d172a1beff98d50eae4f1496c950ffa85b7cc2c4c196963f289a5f39275d + languageName: node + 
linkType: hard + +"readdir-glob@npm:^1.1.2": + version: 1.1.3 + resolution: "readdir-glob@npm:1.1.3" + dependencies: + minimatch: "npm:^5.1.0" + checksum: 10c0/a37e0716726650845d761f1041387acd93aa91b28dd5381950733f994b6c349ddc1e21e266ec7cc1f9b92e205a7a972232f9b89d5424d07361c2c3753d5dbace + languageName: node + linkType: hard + "require-directory@npm:^2.1.1": version: 2.1.1 resolution: "require-directory@npm:2.1.1" @@ -6798,7 +7418,7 @@ __metadata: languageName: node linkType: hard -"safe-buffer@npm:5.2.1": +"safe-buffer@npm:5.2.1, safe-buffer@npm:~5.2.0": version: 5.2.1 resolution: "safe-buffer@npm:5.2.1" checksum: 10c0/6501914237c0a86e9675d4e51d89ca3c21ffd6a31642efeba25ad65720bce6921c9e7e974e5be91a786b25aa058b5303285d3c15dbabf983a919f5f630d349f3 @@ -6949,7 +7569,7 @@ __metadata: languageName: node linkType: hard -"signal-exit@npm:^3.0.3, signal-exit@npm:^3.0.7": +"signal-exit@npm:^3.0.2, signal-exit@npm:^3.0.3, signal-exit@npm:^3.0.7": version: 3.0.7 resolution: "signal-exit@npm:3.0.7" checksum: 10c0/25d272fa73e146048565e08f3309d5b942c1979a6f4a58a8c59d5fa299728e9c2fcd1a759ec870863b1fd38653670240cd420dad2ad9330c71f36608a6a1c912 @@ -7031,6 +7651,13 @@ __metadata: languageName: node linkType: hard +"split-ca@npm:^1.0.1": + version: 1.0.1 + resolution: "split-ca@npm:1.0.1" + checksum: 10c0/f339170b84c6b4706fcf4c60cc84acb36574c0447566bd713301a8d9b4feff7f4627efc8c334bec24944a3e2f35bc596bd58c673c9980d6bfe3137aae1116ba7 + languageName: node + linkType: hard + "split2@npm:^4.1.0": version: 4.2.0 resolution: "split2@npm:4.2.0" @@ -7052,6 +7679,16 @@ __metadata: languageName: node linkType: hard +"ssh-remote-port-forward@npm:^1.0.4": + version: 1.0.4 + resolution: "ssh-remote-port-forward@npm:1.0.4" + dependencies: + "@types/ssh2": "npm:^0.5.48" + ssh2: "npm:^1.4.0" + checksum: 10c0/33a441af12817577ea30d089b03c19f980d2fb2370933123a35026dc6be40f2dfce067e4dfc173e23d745464537ff647aa1bb7469be5571cc21f7cdb25181c09 + languageName: node + linkType: hard + "ssh2@npm:1.4.0": version: 1.4.0 resolution: "ssh2@npm:1.4.0" @@ -7069,6 +7706,23 @@ __metadata: languageName: node linkType: hard +"ssh2@npm:^1.11.0, ssh2@npm:^1.4.0": + version: 1.16.0 + resolution: "ssh2@npm:1.16.0" + dependencies: + asn1: "npm:^0.2.6" + bcrypt-pbkdf: "npm:^1.0.2" + cpu-features: "npm:~0.0.10" + nan: "npm:^2.20.0" + dependenciesMeta: + cpu-features: + optional: true + nan: + optional: true + checksum: 10c0/d336a85d87501c64ba230b6c1a2901a9b0e376fe7f7a1640a7f8dbdafe674b2e1a5dc6236ffd1329969dc0cf03cd57759b28743075e61229a984065ee1d56bed + languageName: node + linkType: hard + "ssri@npm:^10.0.0": version: 10.0.6 resolution: "ssri@npm:10.0.6" @@ -7108,6 +7762,21 @@ __metadata: languageName: node linkType: hard +"streamx@npm:^2.15.0, streamx@npm:^2.20.0": + version: 2.20.2 + resolution: "streamx@npm:2.20.2" + dependencies: + bare-events: "npm:^2.2.0" + fast-fifo: "npm:^1.3.2" + queue-tick: "npm:^1.0.1" + text-decoder: "npm:^1.1.0" + dependenciesMeta: + bare-events: + optional: true + checksum: 10c0/2ad68b9426e0211c1198b5b5dd7280088793c6792e1f8e2a8fbd2487d483f35ee13b0b46edfa247daad2132d6b0abc21af4eaa4a4c099ff4cd11fcff83e6ce3e + languageName: node + linkType: hard + "string-length@npm:^4.0.1": version: 4.0.2 resolution: "string-length@npm:4.0.2" @@ -7140,6 +7809,15 @@ __metadata: languageName: node linkType: hard +"string_decoder@npm:^1.1.1, string_decoder@npm:^1.3.0": + version: 1.3.0 + resolution: "string_decoder@npm:1.3.0" + dependencies: + safe-buffer: "npm:~5.2.0" + checksum: 
10c0/810614ddb030e271cd591935dcd5956b2410dd079d64ff92a1844d6b7588bf992b3e1b69b0f4d34a3e06e0bd73046ac646b5264c1987b20d0601f81ef35d731d + languageName: node + linkType: hard + "string_decoder@npm:~1.1.1": version: 1.1.1 resolution: "string_decoder@npm:1.1.1" @@ -7237,6 +7915,59 @@ __metadata: languageName: node linkType: hard +"tar-fs@npm:^3.0.6": + version: 3.0.6 + resolution: "tar-fs@npm:3.0.6" + dependencies: + bare-fs: "npm:^2.1.1" + bare-path: "npm:^2.1.0" + pump: "npm:^3.0.0" + tar-stream: "npm:^3.1.5" + dependenciesMeta: + bare-fs: + optional: true + bare-path: + optional: true + checksum: 10c0/207b7c0f193495668bd9dbad09a0108ce4ffcfec5bce2133f90988cdda5c81fad83c99f963d01e47b565196594f7a17dbd063ae55b97b36268fcc843975278ee + languageName: node + linkType: hard + +"tar-fs@npm:~2.0.1": + version: 2.0.1 + resolution: "tar-fs@npm:2.0.1" + dependencies: + chownr: "npm:^1.1.1" + mkdirp-classic: "npm:^0.5.2" + pump: "npm:^3.0.0" + tar-stream: "npm:^2.0.0" + checksum: 10c0/0128e888b61c7c4e8e7997d66ceccc3c79d73c01e87cfcc3d9f6b8555b0c88b8d67d91ff167f00b067f726dde497b2d1fb2bba0cfcb3ccb95ae413cb86c715bc + languageName: node + linkType: hard + +"tar-stream@npm:^2.0.0": + version: 2.2.0 + resolution: "tar-stream@npm:2.2.0" + dependencies: + bl: "npm:^4.0.3" + end-of-stream: "npm:^1.4.1" + fs-constants: "npm:^1.0.0" + inherits: "npm:^2.0.3" + readable-stream: "npm:^3.1.1" + checksum: 10c0/2f4c910b3ee7196502e1ff015a7ba321ec6ea837667220d7bcb8d0852d51cb04b87f7ae471008a6fb8f5b1a1b5078f62f3a82d30c706f20ada1238ac797e7692 + languageName: node + linkType: hard + +"tar-stream@npm:^3.0.0, tar-stream@npm:^3.1.5": + version: 3.1.7 + resolution: "tar-stream@npm:3.1.7" + dependencies: + b4a: "npm:^1.6.4" + fast-fifo: "npm:^1.2.0" + streamx: "npm:^2.15.0" + checksum: 10c0/a09199d21f8714bd729993ac49b6c8efcb808b544b89f23378ad6ffff6d1cb540878614ba9d4cfec11a64ef39e1a6f009a5398371491eb1fda606ffc7f70f718 + languageName: node + linkType: hard + "tar@npm:^6.1.11, tar@npm:^6.2.1": version: 6.2.1 resolution: "tar@npm:6.2.1" @@ -7262,6 +7993,36 @@ __metadata: languageName: node linkType: hard +"testcontainers@npm:^10.14.0": + version: 10.14.0 + resolution: "testcontainers@npm:10.14.0" + dependencies: + "@balena/dockerignore": "npm:^1.0.2" + "@types/dockerode": "npm:^3.3.29" + archiver: "npm:^7.0.1" + async-lock: "npm:^1.4.1" + byline: "npm:^5.0.0" + debug: "npm:^4.3.5" + docker-compose: "npm:^0.24.8" + dockerode: "npm:^3.3.5" + get-port: "npm:^5.1.1" + proper-lockfile: "npm:^4.1.2" + properties-reader: "npm:^2.3.0" + ssh-remote-port-forward: "npm:^1.0.4" + tar-fs: "npm:^3.0.6" + tmp: "npm:^0.2.3" + undici: "npm:^5.28.4" + checksum: 10c0/b988205b3402cac4873c6592d6f57d4e4fded1e9fd10a748d861028a961fc9d4a8d97c41bf44da207f9de85b73632c51ec152502aa00ae77cbbc6e57fd02205a + languageName: node + linkType: hard + +"text-decoder@npm:^1.1.0": + version: 1.2.1 + resolution: "text-decoder@npm:1.2.1" + checksum: 10c0/deea9e3f4bde3b8990439e59cd52b2e917a416e29fbaf607052c89117c7148f1831562c099e9dd49abea0839cffdeb75a3c8f1f137f1686afd2808322f8e3f00 + languageName: node + linkType: hard + "text-table@npm:^0.2.0": version: 0.2.0 resolution: "text-table@npm:0.2.0" @@ -7269,7 +8030,7 @@ __metadata: languageName: node linkType: hard -"tmp@npm:^0.2.1": +"tmp@npm:^0.2.1, tmp@npm:^0.2.3": version: 0.2.3 resolution: "tmp@npm:0.2.3" checksum: 10c0/3e809d9c2f46817475b452725c2aaa5d11985cf18d32a7a970ff25b568438e2c076c2e8609224feef3b7923fa9749b74428e3e634f6b8e520c534eef2fd24125 @@ -7308,7 +8069,7 @@ __metadata: languageName: node linkType: hard 
-"ts-jest@npm:^29.2.4": +"ts-jest@npm:^29.2.5": version: 29.2.5 resolution: "ts-jest@npm:29.2.5" dependencies: @@ -7551,6 +8312,13 @@ __metadata: languageName: node linkType: hard +"undici-types@npm:~5.26.4": + version: 5.26.5 + resolution: "undici-types@npm:5.26.5" + checksum: 10c0/bb673d7876c2d411b6eb6c560e0c571eef4a01c1c19925175d16e3a30c4c428181fb8d7ae802a261f283e4166a0ac435e2f505743aa9e45d893f9a3df017b501 + languageName: node + linkType: hard + "undici-types@npm:~6.19.8": version: 6.19.8 resolution: "undici-types@npm:6.19.8" @@ -7558,6 +8326,15 @@ __metadata: languageName: node linkType: hard +"undici@npm:^5.28.4": + version: 5.28.4 + resolution: "undici@npm:5.28.4" + dependencies: + "@fastify/busboy": "npm:^2.0.0" + checksum: 10c0/08d0f2596553aa0a54ca6e8e9c7f45aef7d042c60918564e3a142d449eda165a80196f6ef19ea2ef2e6446959e293095d8e40af1236f0d67223b06afac5ecad7 + languageName: node + linkType: hard + "unique-filename@npm:^3.0.0": version: 3.0.0 resolution: "unique-filename@npm:3.0.0" @@ -7616,7 +8393,7 @@ __metadata: languageName: node linkType: hard -"util-deprecate@npm:~1.0.1": +"util-deprecate@npm:^1.0.1, util-deprecate@npm:~1.0.1": version: 1.0.2 resolution: "util-deprecate@npm:1.0.2" checksum: 10c0/41a5bdd214df2f6c3ecf8622745e4a366c4adced864bc3c833739791aeeeb1838119af7daed4ba36428114b5c67dcda034a79c882e97e43c03e66a4dd7389942 @@ -7888,6 +8665,15 @@ __metadata: languageName: node linkType: hard +"yaml@npm:^2.2.2": + version: 2.6.1 + resolution: "yaml@npm:2.6.1" + bin: + yaml: bin.mjs + checksum: 10c0/aebf07f61c72b38c74d2b60c3a3ccf89ee4da45bcd94b2bfb7899ba07a5257625a7c9f717c65a6fc511563d48001e01deb1d9e55f0133f3e2edf86039c8c1be7 + languageName: node + linkType: hard + "yargs-parser@npm:^18.1.2": version: 18.1.3 resolution: "yargs-parser@npm:18.1.3" @@ -7953,6 +8739,17 @@ __metadata: languageName: node linkType: hard +"zip-stream@npm:^6.0.1": + version: 6.0.1 + resolution: "zip-stream@npm:6.0.1" + dependencies: + archiver-utils: "npm:^5.0.0" + compress-commons: "npm:^6.0.2" + readable-stream: "npm:^4.0.0" + checksum: 10c0/50f2fb30327fb9d09879abf7ae2493705313adf403e794b030151aaae00009162419d60d0519e807673ec04d442e140c8879ca14314df0a0192de3b233e8f28b + languageName: node + linkType: hard + "zod@npm:^3.23.8": version: 3.23.8 resolution: "zod@npm:3.23.8"