diff --git a/src/app/data-client.spec.ts b/src/app/data-client.spec.ts
index 10bdd17c8..3ba2a8ce1 100644
--- a/src/app/data-client.spec.ts
+++ b/src/app/data-client.spec.ts
@@ -1,6 +1,6 @@
-import { BSON } from 'bsonfy';
 import { Struct, Timestamp, type JsonValue } from '@bufbuild/protobuf';
 import { createRouterTransport, type Transport } from '@connectrpc/connect';
+import { BSON } from 'bsonfy';
 import { beforeEach, describe, expect, it, vi } from 'vitest';
 import { DataService } from '../gen/app/data/v1/data_connect';
 import {
@@ -33,6 +33,8 @@ import {
   Filter,
   GetDatabaseConnectionRequest,
   GetDatabaseConnectionResponse,
+  GetLatestTabularDataRequest,
+  GetLatestTabularDataResponse,
   RemoveBinaryDataFromDatasetByIDsRequest,
   RemoveBinaryDataFromDatasetByIDsResponse,
   RemoveBoundingBoxFromImageByIDRequest,
   RemoveBoundingBoxFromImageByIDResponse,
@@ -50,9 +52,23 @@ import {
   TagsByFilterRequest,
   TagsByFilterResponse,
   TagsFilter,
-  GetLatestTabularDataRequest,
-  GetLatestTabularDataResponse,
 } from '../gen/app/data/v1/data_pb';
+import { DataPipelinesService } from '../gen/app/datapipelines/v1/data_pipelines_connect';
+import {
+  CreateDataPipelineRequest,
+  CreateDataPipelineResponse,
+  DataPipeline,
+  DataPipelineRun,
+  DataPipelineRunStatus,
+  DeleteDataPipelineRequest,
+  DeleteDataPipelineResponse,
+  GetDataPipelineRequest,
+  GetDataPipelineResponse,
+  ListDataPipelineRunsRequest,
+  ListDataPipelineRunsResponse,
+  ListDataPipelinesRequest,
+  ListDataPipelinesResponse,
+} from '../gen/app/datapipelines/v1/data_pipelines_pb';
 import { DatasetService } from '../gen/app/dataset/v1/dataset_connect';
 import {
   CreateDatasetRequest,
@@ -72,34 +88,25 @@ import {
   DataCaptureUploadRequest,
   DataCaptureUploadResponse,
   DataType,
+  FileData,
+  FileUploadRequest,
+  FileUploadResponse,
   SensorData,
   SensorMetadata,
   UploadMetadata,
 } from '../gen/app/datasync/v1/data_sync_pb';
-import { DataClient, type FilterOptions } from './data-client';
 import {
-  DataPipeline,
-  ListDataPipelinesRequest,
-  ListDataPipelinesResponse,
-  GetDataPipelineRequest,
-  GetDataPipelineResponse,
-  CreateDataPipelineRequest,
-  CreateDataPipelineResponse,
-  DeleteDataPipelineRequest,
-  DeleteDataPipelineResponse,
-  DataPipelineRun,
-  DataPipelineRunStatus,
-  ListDataPipelineRunsRequest,
-  ListDataPipelineRunsResponse,
-} from '../gen/app/datapipelines/v1/data_pipelines_pb';
-import { DataPipelinesService } from '../gen/app/datapipelines/v1/data_pipelines_connect';
+  DataClient,
+  type FileUploadOptions,
+  type FilterOptions,
+} from './data-client';
 
 vi.mock('../gen/app/data/v1/data_pb_service');
 
 let mockTransport: Transport;
 const subject = () => new DataClient(mockTransport);
 
 describe('DataClient tests', () => {
-  const filter = subject().createFilter({
+  const filter = DataClient.createFilter({
     componentName: 'testComponentName',
     componentType: 'testComponentType',
@@ -1035,13 +1042,13 @@ describe('DataClient tests', () => {
 
   describe('createFilter tests', () => {
     it('create empty filter', () => {
-      const testFilter = subject().createFilter({});
+      const testFilter = DataClient.createFilter({});
       expect(testFilter).toEqual(new Filter());
     });
 
     it('create filter', () => {
       const opts = { componentName: 'camera' };
-      const testFilter = subject().createFilter(opts);
+      const testFilter = DataClient.createFilter(opts);
 
       const expectedFilter = new Filter({
         componentName: 'camera',
@@ -1089,7 +1096,7 @@
         endTime,
         tags: tagsList,
       };
-      const testFilter = subject().createFilter(opts);
+      const testFilter = DataClient.createFilter(opts);
       expect(testFilter.componentType).toEqual('testComponentType');
 
       const expectedFilter = new Filter({
@@ -1716,3 +1723,129 @@
     });
   });
 });
+
+describe('fileUpload tests', () => {
+  const partId = 'testPartId';
+  const binaryData = new Uint8Array([1, 2, 3, 4, 5]);
+  const options: FileUploadOptions = {
+    componentType: 'componentType',
+    componentName: 'componentName',
+    methodName: 'methodName',
+    fileName: 'fileName',
+    fileExtension: '.png',
+    tags: ['testTag1', 'testTag2'],
+    datasetIds: ['dataset1', 'dataset2'],
+  };
+
+  const expectedFileId = 'testFileId';
+  const expectedBinaryDataId = 'testBinaryDataId';
+
+  let capturedRequests: FileUploadRequest[];
+
+  beforeEach(() => {
+    capturedRequests = [];
+    mockTransport = createRouterTransport(({ service }) => {
+      service(DataSyncService, {
+        fileUpload: async (requests: AsyncIterable<FileUploadRequest>) => {
+          for await (const request of requests) {
+            capturedRequests.push(request);
+          }
+          return new FileUploadResponse({
+            fileId: expectedFileId,
+            binaryDataId: expectedBinaryDataId,
+          });
+        },
+      });
+    });
+  });
+
+  it('uploads file with metadata and file contents', async () => {
+    const result = await subject().fileUpload(binaryData, partId, options);
+
+    expect(result).toBe(expectedBinaryDataId);
+    expect(capturedRequests).toHaveLength(2);
+
+    // Check metadata request
+    const metadataRequest = capturedRequests[0]!;
+    expect(metadataRequest.uploadPacket.case).toBe('metadata');
+    const metadata = metadataRequest.uploadPacket.value as UploadMetadata;
+    expect(metadata.partId).toBe(partId);
+    expect(metadata.type).toBe(DataType.FILE);
+    expect(metadata.componentType).toBe(options.componentType);
+    expect(metadata.componentName).toBe(options.componentName);
+    expect(metadata.methodName).toBe(options.methodName);
+    expect(metadata.fileName).toBe(options.fileName);
+    expect(metadata.fileExtension).toBe(options.fileExtension);
+    expect(metadata.tags).toStrictEqual(options.tags);
+    expect(metadata.datasetIds).toStrictEqual(options.datasetIds);
+
+    // Check file contents request
+    const fileContentsRequest = capturedRequests[1]!;
+    expect(fileContentsRequest.uploadPacket.case).toBe('fileContents');
+    const fileContents = fileContentsRequest.uploadPacket.value as FileData;
+    expect(fileContents.data).toEqual(binaryData);
+  });
+
+  it('uploads file without optional parameters', async () => {
+    const result = await subject().fileUpload(binaryData, partId);
+
+    expect(result).toBe(expectedBinaryDataId);
+    expect(capturedRequests).toHaveLength(2);
+
+    // Check metadata request
+    const metadataRequest = capturedRequests[0]!;
+    expect(metadataRequest.uploadPacket.case).toBe('metadata');
+    const metadata = metadataRequest.uploadPacket.value as UploadMetadata;
+    expect(metadata.partId).toBe(partId);
+    expect(metadata.type).toBe(DataType.FILE);
+    expect(metadata.componentType).toBe('');
+    expect(metadata.componentName).toBe('');
+    expect(metadata.methodName).toBe('');
+    expect(metadata.fileName).toBe('');
+    expect(metadata.fileExtension).toBe('');
+    expect(metadata.tags).toStrictEqual([]);
+    expect(metadata.datasetIds).toStrictEqual([]);
+
+    // Check file contents request
+    const fileContentsRequest = capturedRequests[1]!;
+    expect(fileContentsRequest.uploadPacket.case).toBe('fileContents');
+    const fileContents = fileContentsRequest.uploadPacket.value as FileData;
+    expect(fileContents.data).toEqual(binaryData);
+  });
+
+  it('chunks file data', async () => {
+    const numChunks = 3;
+    const data = Uint8Array.from(
+      { length: DataClient.UPLOAD_CHUNK_SIZE * numChunks },
+      () => Math.floor(Math.random() * 256)
+    );
+
+    const result = await subject().fileUpload(data, partId);
+    expect(result).toBe(expectedBinaryDataId);
+    expect(capturedRequests).toHaveLength(1 + numChunks);
+
+    const metadataRequest = capturedRequests[0]!;
+    expect(metadataRequest.uploadPacket.case).toBe('metadata');
+
+    const contentRequests = capturedRequests.slice(1);
+    expect(contentRequests).toHaveLength(numChunks);
+
+    const receivedLength = contentRequests.reduce(
+      (acc, val) => acc + (val.uploadPacket.value as FileData).data.length,
+      0
+    );
+    expect(receivedLength).toEqual(numChunks * DataClient.UPLOAD_CHUNK_SIZE);
+
+    const receivedData = new Uint8Array(receivedLength);
+    let offset = 0;
+    for (const req of contentRequests) {
+      expect(req.uploadPacket.case).toBe('fileContents');
+      const fileData = req.uploadPacket.value as FileData;
+      expect(fileData.data).toHaveLength(DataClient.UPLOAD_CHUNK_SIZE);
+      receivedData.set(fileData.data, offset);
+      offset += fileData.data.length;
+    }
+
+    expect(receivedData).toStrictEqual(data);
+  });
+});
diff --git a/src/app/data-client.ts b/src/app/data-client.ts
index 704bfb29b..657da1af7 100644
--- a/src/app/data-client.ts
+++ b/src/app/data-client.ts
@@ -1,6 +1,11 @@
-import { BSON } from 'bsonfy';
-import { Struct, Timestamp, type JsonValue } from '@bufbuild/protobuf';
+import {
+  Struct,
+  Timestamp,
+  type JsonValue,
+  type PartialMessage,
+} from '@bufbuild/protobuf';
 import { createClient, type Client, type Transport } from '@connectrpc/connect';
+import { BSON } from 'bsonfy';
 import { DataService } from '../gen/app/data/v1/data_connect';
 import {
   BinaryID,
@@ -8,25 +13,27 @@ import {
   CaptureMetadata,
   Filter,
   Order,
-  TagsFilter,
   TabularDataSource,
   TabularDataSourceType,
+  TagsFilter,
 } from '../gen/app/data/v1/data_pb';
+import { DataPipelinesService } from '../gen/app/datapipelines/v1/data_pipelines_connect';
+import {
+  DataPipeline,
+  DataPipelineRun,
+} from '../gen/app/datapipelines/v1/data_pipelines_pb';
 import { DatasetService } from '../gen/app/dataset/v1/dataset_connect';
 import type { Dataset as PBDataset } from '../gen/app/dataset/v1/dataset_pb';
 import { DataSyncService } from '../gen/app/datasync/v1/data_sync_connect';
-import { DataPipelinesService } from '../gen/app/datapipelines/v1/data_pipelines_connect';
 import {
   DataCaptureUploadRequest,
   DataType,
+  FileData,
+  FileUploadRequest,
   SensorData,
   SensorMetadata,
   UploadMetadata,
 } from '../gen/app/datasync/v1/data_sync_pb';
-import {
-  DataPipeline,
-  DataPipelineRun,
-} from '../gen/app/datapipelines/v1/data_pipelines_pb';
 
 export type FilterOptions = Partial<Filter> & {
   endTime?: Date;
@@ -57,6 +64,43 @@ interface TabularDataPoint {
   payload: JsonValue;
 }
 
+/** Optional parameters for uploading files */
+export interface FileUploadOptions {
+  /**
+   * Optional type of the component associated with the file (for example,
+   * "movement_sensor").
+   */
+  componentType?: string;
+
+  /** Optional name of the component associated with the file. */
+  componentName?: string;
+
+  /** Optional name of the method associated with the file. */
+  methodName?: string;
+
+  /**
+   * Optional name of the file. The empty string `""` will be assigned as the
+   * file name if one isn't provided.
+   */
+  fileName?: string;
+
+  /**
+   * Optional file extension. The empty string `""` will be assigned as the file
+   * extension if one isn't provided. Files with a `.jpeg`, `.jpg`, or `.png`
+   * extension will be saved to the **Images** tab.
+   */
+  fileExtension?: string;
+
+  /**
+   * Optional list of tags to allow for tag-based filtering when retrieving
+   * data.
+   */
+  tags?: string[];
+
+  /** Optional list of datasets to add the data to. */
+  datasetIds?: string[];
+}
+
 export type Dataset = Partial<PBDataset> & {
   created?: Date;
 };
@@ -73,6 +117,7 @@ export class DataClient {
   private datasetClient: Client<typeof DatasetService>;
   private dataSyncClient: Client<typeof DataSyncService>;
   private dataPipelinesClient: Client<typeof DataPipelinesService>;
+  static readonly UPLOAD_CHUNK_SIZE = 8;
 
   constructor(transport: Transport) {
     this.dataClient = createClient(DataService, transport);
@@ -1265,8 +1310,84 @@
     return resp.binaryDataId;
   }
 
-  // eslint-disable-next-line class-methods-use-this
-  createFilter(options: FilterOptions): Filter {
+  /**
+   * Upload arbitrary file data.
+   *
+   * Upload file data that may be stored on a robot along with the relevant
+   * metadata. File data can be found in the **Files** tab of the **DATA**
+   * page.
+   *
+   * @example
+   *
+   * ```ts
+   * const binaryDataId = await dataClient.fileUpload(
+   *   binaryData,
+   *   'INSERT YOUR PART ID',
+   *   {
+   *     fileExtension: '.jpeg',
+   *     tags: ['tag_1', 'tag_2'],
+   *   }
+   * );
+   * ```
+   *
+   * For more information, see [Data
+   * API](https://docs.viam.com/dev/reference/apis/data-client/#fileupload).
+   *
+   * @param binaryData The data to be uploaded
+   * @param partId The part ID of the machine that captured the data
+   * @param options Options for the file upload
+   * @returns The binary data ID of the uploaded data
+   */
+  async fileUpload(
+    binaryData: Uint8Array,
+    partId: string,
+    options?: FileUploadOptions
+  ) {
+    const md = new UploadMetadata({
+      partId,
+      type: DataType.FILE,
+      ...options,
+    });
+
+    const response = await this.dataSyncClient.fileUpload(
+      DataClient.fileUploadRequests(md, binaryData)
+    );
+    return response.binaryDataId;
+  }
+
+  /**
+   * Create an async generator of FileUploadRequests to use with FileUpload
+   * methods.
+   *
+   * @param metadata The file's metadata
+   * @param data The binary data of the file
+   */
+  // eslint-disable-next-line @typescript-eslint/require-await
+  private static async *fileUploadRequests(
+    metadata: UploadMetadata,
+    data: Uint8Array
+  ): AsyncGenerator<PartialMessage<FileUploadRequest>> {
+    yield new FileUploadRequest({
+      uploadPacket: {
+        case: 'metadata',
+        value: metadata,
+      },
+    });
+    for (let i = 0; i < data.length; i += DataClient.UPLOAD_CHUNK_SIZE) {
+      let end = i + DataClient.UPLOAD_CHUNK_SIZE;
+      if (end > data.length) {
+        end = data.length;
+      }
+      yield new FileUploadRequest({
+        uploadPacket: {
+          case: 'fileContents',
+          value: new FileData({ data: data.slice(i, end) }),
+        },
+      });
+    }
+  }
+
+  static createFilter(options: FilterOptions): Filter {
     const filter = new Filter(options);
 
     if (options.startTime ?? options.endTime) {