// Copyright 2022 Luca Casonato. All rights reserved. MIT license.
/**
 * Fitness API Client for Deno
 * ===========================
 *
 * The Fitness API for managing users' fitness tracking data.
 *
 * Docs: https://developers.google.com/fit/rest/v1/get-started
 * Source: https://googleapis.deno.dev/v1/fitness:v1.ts
 */

import { auth, CredentialsClient, GoogleAuth, request } from "/_/base@v1/mod.ts";
export { auth, GoogleAuth };
export type { CredentialsClient };

/**
 * The Fitness API for managing users' fitness tracking data.
 */
export class Fitness {
  // Credentials used to authorize requests; undefined means unauthenticated.
  #client: CredentialsClient | undefined;
  // Base URL every request path is appended to.
  #baseUrl: string;

  constructor(client?: CredentialsClient, baseUrl: string = "https://fitness.googleapis.com/fitness/v1/users/") {
    this.#client = client;
    this.#baseUrl = baseUrl;
  }

  /**
   * Aggregates data of a certain type or stream into buckets divided by a
   * given type of boundary. Multiple data sets of multiple types and from
   * multiple sources can be aggregated into exactly one bucket type per
   * request.
   *
   * @param userId Aggregate data for the person identified. Use me to indicate the authenticated user. Only me is supported at this time.
   */
  async usersDatasetAggregate(userId: string, req: AggregateRequest): Promise<AggregateResponse> {
    // bigint fields must be serialized to strings before JSON encoding.
    req = serializeAggregateRequest(req);
    const url = new URL(`${this.#baseUrl}${ userId }/dataset:aggregate`);
    const body = JSON.stringify(req);
    const data = await request(url.href, {
      client: this.#client,
      method: "POST",
      body,
    });
    return deserializeAggregateResponse(data);
  }

  /**
   * Creates a new data source that is unique across all data sources belonging
   * to this user. A data source is a unique source of sensor data. Data sources
   * can expose raw data coming from hardware sensors on local or companion
   * devices. They can also expose derived data, created by transforming or
   * merging other data sources. Multiple data sources can exist for the same
   * data type. Every data point in every dataset inserted into or read from the
   * Fitness API has an associated data source. Each data source produces a
   * unique stream of dataset updates, with a unique data source identifier. Not
   * all changes to data source affect the data stream ID, so that data
   * collected by updated versions of the same application/device can still be
   * considered to belong to the same data source. Data sources are identified
   * using a string generated by the server, based on the contents of the source
   * being created. The dataStreamId field should not be set when invoking this
   * method. It will be automatically generated by the server with the correct
   * format. If a dataStreamId is set, it must match the format that the server
   * would generate. This format is a combination of some fields from the data
   * source, and has a specific order. If it doesn't match, the request will
   * fail with an error. Specifying a DataType which is not a known type
   * (beginning with "com.google.") will create a DataSource with a *custom data
   * type*. Custom data types are only readable by the application that created
   * them. Custom data types are *deprecated*; use standard data types instead.
   * In addition to the data source fields included in the data source ID, the
   * developer project number that is authenticated when creating the data
   * source is included. This developer project number is obfuscated when read
   * by any other developer reading public data types.
   *
   * @param userId Create the data source for the person identified. Use me to indicate the authenticated user. Only me is supported at this time.
   */
  async usersDataSourcesCreate(userId: string, req: DataSource): Promise<DataSource> {
    const url = new URL(`${this.#baseUrl}${ userId }/dataSources`);
    const body = JSON.stringify(req);
    const data = await request(url.href, {
      client: this.#client,
      method: "POST",
      body,
    });
    return data as DataSource;
  }

  /**
   * Queries for user's data point changes for a particular data source.
   *
   * @param dataSourceId The data stream ID of the data source that created the dataset.
   * @param userId List data points for the person identified. Use me to indicate the authenticated user. Only me is supported at this time.
   */
  async usersDataSourcesDataPointChangesList(dataSourceId: string, userId: string, opts: UsersDataSourcesDataPointChangesListOptions = {}): Promise<ListDataPointChangesResponse> {
    const url = new URL(`${this.#baseUrl}${ userId }/dataSources/${ dataSourceId }/dataPointChanges`);
    if (opts.limit !== undefined) {
      url.searchParams.append("limit", String(opts.limit));
    }
    if (opts.pageToken !== undefined) {
      url.searchParams.append("pageToken", String(opts.pageToken));
    }
    const data = await request(url.href, {
      client: this.#client,
      method: "GET",
    });
    return deserializeListDataPointChangesResponse(data);
  }

  /**
   * Performs an inclusive delete of all data points whose start and end times
   * have any overlap with the time range specified by the dataset ID. For most
   * data types, the entire data point will be deleted. For data types where the
   * time span represents a consistent value (such as
   * com.google.activity.segment), and a data point straddles either end point
   * of the dataset, only the overlapping portion of the data point will be
   * deleted.
   *
   * @param datasetId Dataset identifier that is a composite of the minimum data point start time and maximum data point end time represented as nanoseconds from the epoch. The ID is formatted like: "startTime-endTime" where startTime and endTime are 64 bit integers.
   * @param dataSourceId The data stream ID of the data source that created the dataset.
   * @param userId Delete a dataset for the person identified. Use me to indicate the authenticated user. Only me is supported at this time.
   */
  async usersDataSourcesDatasetsDelete(datasetId: string, dataSourceId: string, userId: string): Promise<void> {
    const url = new URL(`${this.#baseUrl}${ userId }/dataSources/${ dataSourceId }/datasets/${ datasetId }`);
    // The delete response carries no payload; nothing to return.
    await request(url.href, {
      client: this.#client,
      method: "DELETE",
    });
  }

  /**
   * Returns a dataset containing all data points whose start and end times
   * overlap with the specified range of the dataset minimum start time and
   * maximum end time. Specifically, any data point whose start time is less
   * than or equal to the dataset end time and whose end time is greater than or
   * equal to the dataset start time.
   *
   * @param datasetId Dataset identifier that is a composite of the minimum data point start time and maximum data point end time represented as nanoseconds from the epoch. The ID is formatted like: "startTime-endTime" where startTime and endTime are 64 bit integers.
   * @param dataSourceId The data stream ID of the data source that created the dataset.
   * @param userId Retrieve a dataset for the person identified. Use me to indicate the authenticated user. Only me is supported at this time.
   */
  async usersDataSourcesDatasetsGet(datasetId: string, dataSourceId: string, userId: string, opts: UsersDataSourcesDatasetsGetOptions = {}): Promise<Dataset> {
    const url = new URL(`${this.#baseUrl}${ userId }/dataSources/${ dataSourceId }/datasets/${ datasetId }`);
    if (opts.limit !== undefined) {
      url.searchParams.append("limit", String(opts.limit));
    }
    if (opts.pageToken !== undefined) {
      url.searchParams.append("pageToken", String(opts.pageToken));
    }
    const data = await request(url.href, {
      client: this.#client,
      method: "GET",
    });
    return deserializeDataset(data);
  }

  /**
   * Adds data points to a dataset. The dataset need not be previously created.
   * All points within the given dataset will be returned with subsequent calls
   * to retrieve this dataset. Data points can belong to more than one dataset.
   * This method does not use patch semantics: the data points provided are
   * merely inserted, with no existing data replaced.
   *
   * @param datasetId This field is not used, and can be safely omitted.
   * @param dataSourceId The data stream ID of the data source that created the dataset.
   * @param userId Patch a dataset for the person identified. Use me to indicate the authenticated user. Only me is supported at this time.
   */
  async usersDataSourcesDatasetsPatch(datasetId: string, dataSourceId: string, userId: string, req: Dataset): Promise<Dataset> {
    req = serializeDataset(req);
    const url = new URL(`${this.#baseUrl}${ userId }/dataSources/${ dataSourceId }/datasets/${ datasetId }`);
    const body = JSON.stringify(req);
    const data = await request(url.href, {
      client: this.#client,
      method: "PATCH",
      body,
    });
    return deserializeDataset(data);
  }

  /**
   * Deletes the specified data source. The request will fail if the data
   * source contains any data points.
   *
   * @param dataSourceId The data stream ID of the data source to delete.
   * @param userId Retrieve a data source for the person identified. Use me to indicate the authenticated user. Only me is supported at this time.
   */
  async usersDataSourcesDelete(dataSourceId: string, userId: string): Promise<DataSource> {
    const url = new URL(`${this.#baseUrl}${ userId }/dataSources/${ dataSourceId }`);
    const data = await request(url.href, {
      client: this.#client,
      method: "DELETE",
    });
    return data as DataSource;
  }

  /**
   * Returns the specified data source.
   *
   * @param dataSourceId The data stream ID of the data source to retrieve.
   * @param userId Retrieve a data source for the person identified. Use me to indicate the authenticated user. Only me is supported at this time.
   */
  async usersDataSourcesGet(dataSourceId: string, userId: string): Promise<DataSource> {
    const url = new URL(`${this.#baseUrl}${ userId }/dataSources/${ dataSourceId }`);
    const data = await request(url.href, {
      client: this.#client,
      method: "GET",
    });
    return data as DataSource;
  }

  /**
   * Lists all data sources that are visible to the developer, using the OAuth
   * scopes provided. The list is not exhaustive; the user may have private data
   * sources that are only visible to other developers, or calls using other
   * scopes.
   *
   * @param userId List data sources for the person identified. Use me to indicate the authenticated user. Only me is supported at this time.
   */
  async usersDataSourcesList(userId: string, opts: UsersDataSourcesListOptions = {}): Promise<ListDataSourcesResponse> {
    const url = new URL(`${this.#baseUrl}${ userId }/dataSources`);
    if (opts.dataTypeName !== undefined) {
      url.searchParams.append("dataTypeName", String(opts.dataTypeName));
    }
    const data = await request(url.href, {
      client: this.#client,
      method: "GET",
    });
    return data as ListDataSourcesResponse;
  }

  /**
   * Updates the specified data source. The dataStreamId, dataType, type,
   * dataStreamName, and device properties with the exception of version, cannot
   * be modified. Data sources are identified by their dataStreamId.
   *
   * @param dataSourceId The data stream ID of the data source to update.
   * @param userId Update the data source for the person identified. Use me to indicate the authenticated user. Only me is supported at this time.
   */
  async usersDataSourcesUpdate(dataSourceId: string, userId: string, req: DataSource): Promise<DataSource> {
    const url = new URL(`${this.#baseUrl}${ userId }/dataSources/${ dataSourceId }`);
    const body = JSON.stringify(req);
    const data = await request(url.href, {
      client: this.#client,
      method: "PUT",
      body,
    });
    return data as DataSource;
  }

  /**
   * Deletes a session specified by the given session ID.
   *
   * @param sessionId The ID of the session to be deleted.
   * @param userId Delete a session for the person identified. Use me to indicate the authenticated user. Only me is supported at this time.
   */
  async usersSessionsDelete(sessionId: string, userId: string): Promise<void> {
    const url = new URL(`${this.#baseUrl}${ userId }/sessions/${ sessionId }`);
    // The delete response carries no payload; nothing to return.
    await request(url.href, {
      client: this.#client,
      method: "DELETE",
    });
  }

  /**
   * Lists sessions previously created.
   *
   * @param userId List sessions for the person identified. Use me to indicate the authenticated user. Only me is supported at this time.
   */
  async usersSessionsList(userId: string, opts: UsersSessionsListOptions = {}): Promise<ListSessionsResponse> {
    const url = new URL(`${this.#baseUrl}${ userId }/sessions`);
    if (opts.activityType !== undefined) {
      url.searchParams.append("activityType", String(opts.activityType));
    }
    if (opts.endTime !== undefined) {
      url.searchParams.append("endTime", String(opts.endTime));
    }
    if (opts.includeDeleted !== undefined) {
      url.searchParams.append("includeDeleted", String(opts.includeDeleted));
    }
    if (opts.pageToken !== undefined) {
      url.searchParams.append("pageToken", String(opts.pageToken));
    }
    if (opts.startTime !== undefined) {
      url.searchParams.append("startTime", String(opts.startTime));
    }
    const data = await request(url.href, {
      client: this.#client,
      method: "GET",
    });
    return deserializeListSessionsResponse(data);
  }

  /**
   * Updates or insert a given session.
   *
   * @param sessionId The ID of the session to be created.
   * @param userId Create sessions for the person identified. Use me to indicate the authenticated user. Only me is supported at this time.
   */
  async usersSessionsUpdate(sessionId: string, userId: string, req: Session): Promise<Session> {
    req = serializeSession(req);
    const url = new URL(`${this.#baseUrl}${ userId }/sessions/${ sessionId }`);
    const body = JSON.stringify(req);
    const data = await request(url.href, {
      client: this.#client,
      method: "PUT",
      body,
    });
    return deserializeSession(data);
  }
}

/**
 * A single bucket of aggregated data, as returned by
 * usersDatasetAggregate.
 */
export interface AggregateBucket {
  /**
   * Available for Bucket.Type.ACTIVITY_TYPE, Bucket.Type.ACTIVITY_SEGMENT
   */
  activity?: number;
  /**
   * There will be one dataset per AggregateBy in the request.
   */
  dataset?: Dataset[];
  /**
   * The end time for the aggregated data, in milliseconds since epoch,
   * inclusive.
   */
  endTimeMillis?: bigint;
  /**
   * Available for Bucket.Type.SESSION
   */
  session?: Session;
  /**
   * The start time for the aggregated data, in milliseconds since epoch,
   * inclusive.
   */
  startTimeMillis?: bigint;
  /**
   * The type of a bucket signifies how the data aggregation is performed in
   * the bucket.
   */
  type?: | "unknown" | "time" | "session" | "activityType" | "activitySegment";
}

/** Converts an AggregateBucket to its JSON wire form (bigint fields -> strings). */
function serializeAggregateBucket(data: any): AggregateBucket {
  return {
    ...data,
    dataset: data["dataset"] !== undefined ? data["dataset"].map((item: any) => (serializeDataset(item))) : undefined,
    endTimeMillis: data["endTimeMillis"] !== undefined ? String(data["endTimeMillis"]) : undefined,
    session: data["session"] !== undefined ? serializeSession(data["session"]) : undefined,
    startTimeMillis: data["startTimeMillis"] !== undefined ? String(data["startTimeMillis"]) : undefined,
  };
}

/** Parses the JSON wire form of an AggregateBucket (string fields -> bigints). */
function deserializeAggregateBucket(data: any): AggregateBucket {
  return {
    ...data,
    dataset: data["dataset"] !== undefined ? data["dataset"].map((item: any) => (deserializeDataset(item))) : undefined,
    endTimeMillis: data["endTimeMillis"] !== undefined ? BigInt(data["endTimeMillis"]) : undefined,
    session: data["session"] !== undefined ? deserializeSession(data["session"]) : undefined,
    startTimeMillis: data["startTimeMillis"] !== undefined ?
BigInt(data["startTimeMillis"]) : undefined,
  };
}

/**
 * The specification of which data to aggregate.
 */
export interface AggregateBy {
  /**
   * A data source ID to aggregate. Only data from the specified data source ID
   * will be included in the aggregation. If specified, this data source must
   * exist; the OAuth scopes in the supplied credentials must grant read access
   * to this data type. The dataset in the response will have the same data
   * source ID. Note: Data can be aggregated by either the dataTypeName or the
   * dataSourceId, not both.
   */
  dataSourceId?: string;
  /**
   * The data type to aggregate. All data sources providing this data type will
   * contribute data to the aggregation. The response will contain a single
   * dataset for this data type name. The dataset will have a data source ID of
   * derived::com.google.android.gms:aggregated. If the user has no data for
   * this data type, an empty data set will be returned. Note: Data can be
   * aggregated by either the dataTypeName or the dataSourceId, not both.
   */
  dataTypeName?: string;
}

/**
 * Next id: 10
 */
export interface AggregateRequest {
  /**
   * The specification of data to be aggregated. At least one aggregateBy spec
   * must be provided. All data that is specified will be aggregated using the
   * same bucketing criteria. There will be one dataset in the response for
   * every aggregateBy spec.
   */
  aggregateBy?: AggregateBy[];
  /**
   * Specifies that data be aggregated each activity segment recorded for a
   * user. Similar to bucketByActivitySegment, but bucketing is done for each
   * activity segment rather than all segments of the same type. Mutually
   * exclusive of other bucketing specifications.
   */
  bucketByActivitySegment?: BucketByActivity;
  /**
   * Specifies that data be aggregated by the type of activity being performed
   * when the data was recorded. All data that was recorded during a certain
   * activity type (for the given time range) will be aggregated into the same
   * bucket. Data that was recorded while the user was not active will not be
   * included in the response. Mutually exclusive of other bucketing
   * specifications.
   */
  bucketByActivityType?: BucketByActivity;
  /**
   * Specifies that data be aggregated by user sessions. Data that does not
   * fall within the time range of a session will not be included in the
   * response. Mutually exclusive of other bucketing specifications.
   */
  bucketBySession?: BucketBySession;
  /**
   * Specifies that data be aggregated by a single time interval. Mutually
   * exclusive of other bucketing specifications.
   */
  bucketByTime?: BucketByTime;
  /**
   * The end of a window of time. Data that intersects with this time window
   * will be aggregated. The time is in milliseconds since epoch, inclusive. The
   * maximum allowed difference between start_time_millis and end_time_millis
   * is 7776000000 (roughly 90 days).
   */
  endTimeMillis?: bigint;
  /**
   * DO NOT POPULATE THIS FIELD. It is ignored.
   */
  filteredDataQualityStandard?: (
    | "dataQualityUnknown"
    | "dataQualityBloodPressureEsh2002"
    | "dataQualityBloodPressureEsh2010"
    | "dataQualityBloodPressureAami"
    | "dataQualityBloodPressureBhsAA"
    | "dataQualityBloodPressureBhsAB"
    | "dataQualityBloodPressureBhsBA"
    | "dataQualityBloodPressureBhsBB"
    | "dataQualityBloodGlucoseIso151972003"
    | "dataQualityBloodGlucoseIso151972013"
  )[];
  /**
   * The start of a window of time. Data that intersects with this time window
   * will be aggregated. The time is in milliseconds since epoch, inclusive.
   */
  startTimeMillis?: bigint;
}

/** Converts an AggregateRequest to its JSON wire form (bigint fields -> strings). */
function serializeAggregateRequest(data: any): AggregateRequest {
  return {
    ...data,
    bucketByActivitySegment: data["bucketByActivitySegment"] !== undefined ? serializeBucketByActivity(data["bucketByActivitySegment"]) : undefined,
    bucketByActivityType: data["bucketByActivityType"] !== undefined ? serializeBucketByActivity(data["bucketByActivityType"]) : undefined,
    bucketBySession: data["bucketBySession"] !== undefined ? serializeBucketBySession(data["bucketBySession"]) : undefined,
    bucketByTime: data["bucketByTime"] !== undefined ? serializeBucketByTime(data["bucketByTime"]) : undefined,
    endTimeMillis: data["endTimeMillis"] !== undefined ? String(data["endTimeMillis"]) : undefined,
    startTimeMillis: data["startTimeMillis"] !== undefined ? String(data["startTimeMillis"]) : undefined,
  };
}

/** Parses the JSON wire form of an AggregateRequest (string fields -> bigints). */
function deserializeAggregateRequest(data: any): AggregateRequest {
  return {
    ...data,
    bucketByActivitySegment: data["bucketByActivitySegment"] !== undefined ? deserializeBucketByActivity(data["bucketByActivitySegment"]) : undefined,
    bucketByActivityType: data["bucketByActivityType"] !== undefined ? deserializeBucketByActivity(data["bucketByActivityType"]) : undefined,
    bucketBySession: data["bucketBySession"] !== undefined ? deserializeBucketBySession(data["bucketBySession"]) : undefined,
    bucketByTime: data["bucketByTime"] !== undefined ? deserializeBucketByTime(data["bucketByTime"]) : undefined,
    endTimeMillis: data["endTimeMillis"] !== undefined ? BigInt(data["endTimeMillis"]) : undefined,
    startTimeMillis: data["startTimeMillis"] !== undefined ? BigInt(data["startTimeMillis"]) : undefined,
  };
}

export interface AggregateResponse {
  /**
   * A list of buckets containing the aggregated data.
   */
  bucket?: AggregateBucket[];
}

/** Converts an AggregateResponse to its JSON wire form. */
function serializeAggregateResponse(data: any): AggregateResponse {
  return {
    ...data,
    bucket: data["bucket"] !== undefined ? data["bucket"].map((item: any) => (serializeAggregateBucket(item))) : undefined,
  };
}

/** Parses the JSON wire form of an AggregateResponse. */
function deserializeAggregateResponse(data: any): AggregateResponse {
  return {
    ...data,
    bucket: data["bucket"] !== undefined ? data["bucket"].map((item: any) => (deserializeAggregateBucket(item))) : undefined,
  };
}

export interface Application {
  /**
   * An optional URI that can be used to link back to the application.
   */
  detailsUrl?: string;
  /**
   * The name of this application. This is required for REST clients, but we do
   * not enforce uniqueness of this name.
It is provided as a matter of
   * convenience for other developers who would like to identify which REST
   * created an Application or Data Source.
   */
  name?: string;
  /**
   * Package name for this application. This is used as a unique identifier
   * when created by Android applications, but cannot be specified by REST
   * clients. REST clients will have their developer project number reflected
   * into the Data Source data stream IDs, instead of the packageName.
   */
  packageName?: string;
  /**
   * Version of the application. You should update this field whenever the
   * application changes in a way that affects the computation of the data.
   */
  version?: string;
}

export interface BucketByActivity {
  /**
   * The default activity stream will be used if a specific
   * activityDataSourceId is not specified.
   */
  activityDataSourceId?: string;
  /**
   * Specifies that only activity segments of duration longer than
   * minDurationMillis are considered and used as a container for aggregated
   * data.
   */
  minDurationMillis?: bigint;
}

/** Converts a BucketByActivity to its JSON wire form (bigint -> string). */
function serializeBucketByActivity(data: any): BucketByActivity {
  return {
    ...data,
    minDurationMillis: data["minDurationMillis"] !== undefined ? String(data["minDurationMillis"]) : undefined,
  };
}

/** Parses the JSON wire form of a BucketByActivity (string -> bigint). */
function deserializeBucketByActivity(data: any): BucketByActivity {
  return {
    ...data,
    minDurationMillis: data["minDurationMillis"] !== undefined ? BigInt(data["minDurationMillis"]) : undefined,
  };
}

export interface BucketBySession {
  /**
   * Specifies that only sessions of duration longer than minDurationMillis are
   * considered and used as a container for aggregated data.
   */
  minDurationMillis?: bigint;
}

/** Converts a BucketBySession to its JSON wire form (bigint -> string). */
function serializeBucketBySession(data: any): BucketBySession {
  return {
    ...data,
    minDurationMillis: data["minDurationMillis"] !== undefined ? String(data["minDurationMillis"]) : undefined,
  };
}

/** Parses the JSON wire form of a BucketBySession (string -> bigint). */
function deserializeBucketBySession(data: any): BucketBySession {
  return {
    ...data,
    minDurationMillis: data["minDurationMillis"] !== undefined ? BigInt(data["minDurationMillis"]) : undefined,
  };
}

export interface BucketByTime {
  /**
   * Specifies that result buckets aggregate data by exactly durationMillis
   * time frames. Time frames that contain no data will be included in the
   * response with an empty dataset.
   */
  durationMillis?: bigint;
  // Optional calendar-period bucketing (day/week/month) with a time zone.
  period?: BucketByTimePeriod;
}

/** Converts a BucketByTime to its JSON wire form (bigint -> string). */
function serializeBucketByTime(data: any): BucketByTime {
  return {
    ...data,
    durationMillis: data["durationMillis"] !== undefined ? String(data["durationMillis"]) : undefined,
  };
}

/** Parses the JSON wire form of a BucketByTime (string -> bigint). */
function deserializeBucketByTime(data: any): BucketByTime {
  return {
    ...data,
    durationMillis: data["durationMillis"] !== undefined ? BigInt(data["durationMillis"]) : undefined,
  };
}

export interface BucketByTimePeriod {
  /**
   * org.joda.timezone.DateTimeZone
   */
  timeZoneId?: string;
  // Calendar unit of the period.
  type?: | "day" | "week" | "month";
  // Number of periods per bucket.
  value?: number;
}

/**
 * Represents a single data point, generated by a particular data source. A
 * data point holds a value for each field, an end timestamp and an optional
 * start time. The exact semantics of each of these attributes are specified in
 * the documentation for the particular data type. A data point can represent an
 * instantaneous measurement, reading or input observation, as well as averages
 * or aggregates over a time interval. Check the data type documentation to
 * determine which is the case for a particular data type. Data points always
 * contain one value for each field of the data type.
 */
export interface DataPoint {
  /**
   * DO NOT USE THIS FIELD. It is ignored, and not stored.
   */
  computationTimeMillis?: bigint;
  /**
   * The data type defining the format of the values in this data point.
   */
  dataTypeName?: string;
  /**
   * The end time of the interval represented by this data point, in
   * nanoseconds since epoch.
   */
  endTimeNanos?: bigint;
  /**
   * Indicates the last time this data point was modified. Useful only in
   * contexts where we are listing the data changes, rather than representing
   * the current state of the data.
*/
  modifiedTimeMillis?: bigint;
  /**
   * If the data point is contained in a dataset for a derived data source,
   * this field will be populated with the data source stream ID that created
   * the data point originally. WARNING: do not rely on this field for anything
   * other than debugging. The value of this field, if it is set at all, is an
   * implementation detail and is not guaranteed to remain consistent.
   */
  originDataSourceId?: string;
  /**
   * The raw timestamp from the original SensorEvent.
   */
  rawTimestampNanos?: bigint;
  /**
   * The start time of the interval represented by this data point, in
   * nanoseconds since epoch.
   */
  startTimeNanos?: bigint;
  /**
   * Values of each data type field for the data point. It is expected that
   * each value corresponding to a data type field will occur in the same order
   * that the field is listed with in the data type specified in a data source.
   * Only one of integer and floating point fields will be populated, depending
   * on the format enum value within data source's type field.
   */
  value?: Value[];
}

/** Converts a DataPoint to its JSON wire form (bigint fields -> strings). */
function serializeDataPoint(data: any): DataPoint {
  return {
    ...data,
    computationTimeMillis: data["computationTimeMillis"] !== undefined ? String(data["computationTimeMillis"]) : undefined,
    endTimeNanos: data["endTimeNanos"] !== undefined ? String(data["endTimeNanos"]) : undefined,
    modifiedTimeMillis: data["modifiedTimeMillis"] !== undefined ? String(data["modifiedTimeMillis"]) : undefined,
    rawTimestampNanos: data["rawTimestampNanos"] !== undefined ? String(data["rawTimestampNanos"]) : undefined,
    startTimeNanos: data["startTimeNanos"] !== undefined ? String(data["startTimeNanos"]) : undefined,
  };
}

/** Parses the JSON wire form of a DataPoint (string fields -> bigints). */
function deserializeDataPoint(data: any): DataPoint {
  return {
    ...data,
    computationTimeMillis: data["computationTimeMillis"] !== undefined ? BigInt(data["computationTimeMillis"]) : undefined,
    endTimeNanos: data["endTimeNanos"] !== undefined ? BigInt(data["endTimeNanos"]) : undefined,
    modifiedTimeMillis: data["modifiedTimeMillis"] !== undefined ? BigInt(data["modifiedTimeMillis"]) : undefined,
    rawTimestampNanos: data["rawTimestampNanos"] !== undefined ? BigInt(data["rawTimestampNanos"]) : undefined,
    startTimeNanos: data["startTimeNanos"] !== undefined ? BigInt(data["startTimeNanos"]) : undefined,
  };
}

/**
 * A dataset represents a projection container for data points. They do not
 * carry any info of their own. Datasets represent a set of data points from a
 * particular data source. A data point can be found in more than one dataset.
 */
export interface Dataset {
  /**
   * The data stream ID of the data source that created the points in this
   * dataset.
   */
  dataSourceId?: string;
  /**
   * The largest end time of all data points in this possibly partial
   * representation of the dataset. Time is in nanoseconds from epoch. This
   * should also match the second part of the dataset identifier.
   */
  maxEndTimeNs?: bigint;
  /**
   * The smallest start time of all data points in this possibly partial
   * representation of the dataset. Time is in nanoseconds from epoch. This
   * should also match the first part of the dataset identifier.
   */
  minStartTimeNs?: bigint;
  /**
   * This token will be set when a dataset is received in response to a GET
   * request and the dataset is too large to be included in a single response.
   * Provide this value in a subsequent GET request to return the next page of
   * data points within this dataset.
   */
  nextPageToken?: string;
  /**
   * A partial list of data points contained in the dataset, ordered by
   * endTimeNanos. This list is considered complete when retrieving a small
   * dataset and partial when patching a dataset or retrieving a dataset that is
   * too large to include in a single response.
   */
  point?: DataPoint[];
}

/** Converts a Dataset to its JSON wire form (bigint fields -> strings). */
function serializeDataset(data: any): Dataset {
  return {
    ...data,
    maxEndTimeNs: data["maxEndTimeNs"] !== undefined ? String(data["maxEndTimeNs"]) : undefined,
    minStartTimeNs: data["minStartTimeNs"] !== undefined ? String(data["minStartTimeNs"]) : undefined,
    point: data["point"] !== undefined ?
data["point"].map((item: any) => (serializeDataPoint(item))) : undefined,
  };
}

/** Parses the JSON wire form of a Dataset (string fields -> bigints). */
function deserializeDataset(data: any): Dataset {
  return {
    ...data,
    maxEndTimeNs: data["maxEndTimeNs"] !== undefined ? BigInt(data["maxEndTimeNs"]) : undefined,
    minStartTimeNs: data["minStartTimeNs"] !== undefined ? BigInt(data["minStartTimeNs"]) : undefined,
    point: data["point"] !== undefined ? data["point"].map((item: any) => (deserializeDataPoint(item))) : undefined,
  };
}

/**
 * Definition of a unique source of sensor data. Data sources can expose raw
 * data coming from hardware sensors on local or companion devices. They can
 * also expose derived data, created by transforming or merging other data
 * sources. Multiple data sources can exist for the same data type. Every data
 * point inserted into or read from this service has an associated data source.
 * The data source contains enough information to uniquely identify its data,
 * including the hardware device and the application that collected and/or
 * transformed the data. It also holds useful metadata, such as the hardware and
 * application versions, and the device type. Each data source produces a unique
 * stream of data, with a unique identifier. Not all changes to data source
 * affect the stream identifier, so that data collected by updated versions of
 * the same application/device can still be considered to belong to the same
 * data stream.
 */
export interface DataSource {
  /**
   * Information about an application which feeds sensor data into the
   * platform.
   */
  application?: Application;
  /**
   * DO NOT POPULATE THIS FIELD. It is never populated in responses from the
   * platform, and is ignored in queries. It will be removed in a future version
   * entirely.
   */
  dataQualityStandard?: (
    | "dataQualityUnknown"
    | "dataQualityBloodPressureEsh2002"
    | "dataQualityBloodPressureEsh2010"
    | "dataQualityBloodPressureAami"
    | "dataQualityBloodPressureBhsAA"
    | "dataQualityBloodPressureBhsAB"
    | "dataQualityBloodPressureBhsBA"
    | "dataQualityBloodPressureBhsBB"
    | "dataQualityBloodGlucoseIso151972003"
    | "dataQualityBloodGlucoseIso151972013"
  )[];
  /**
   * A unique identifier for the data stream produced by this data source. The
   * identifier includes: - The physical device's manufacturer, model, and
   * serial number (UID). - The application's package name or name. Package name
   * is used when the data source was created by an Android application. The
   * developer project number is used when the data source was created by a REST
   * client. - The data source's type. - The data source's stream name. Note
   * that not all attributes of the data source are used as part of the stream
   * identifier. In particular, the version of the hardware/the application
   * isn't used. This allows us to preserve the same stream through version
   * updates. This also means that two DataSource objects may represent the same
   * data stream even if they're not equal. The exact format of the data stream
   * ID created by an Android application is:
   * type:dataType.name:application.packageName:device.manufacturer:device.model:device.uid:dataStreamName
   * The exact format of the data stream ID created by a REST client is:
   * type:dataType.name:developer project
   * number:device.manufacturer:device.model:device.uid:dataStreamName When any
   * of the optional fields that make up the data stream ID are absent, they
   * will be omitted from the data stream ID. The minimum viable data stream ID
   * would be: type:dataType.name:developer project number Finally, the
   * developer project number and device UID are obfuscated when read by any
   * REST or Android client that did not create the data source. Only the data
   * source creator will see the developer project number in clear and normal
   * form. This means a client will see a different set of data_stream_ids than
   * another client with different credentials.
   */
  dataStreamId?: string;
  /**
   * The stream name uniquely identifies this particular data source among
   * other data sources of the same type from the same underlying producer.
   * Setting the stream name is optional, but should be done whenever an
   * application exposes two streams for the same data type, or when a device
   * has two equivalent sensors.
   */
  dataStreamName?: string;
  /**
   * The data type defines the schema for a stream of data being collected by,
   * inserted into, or queried from the Fitness API.
   */
  dataType?: DataType;
  /**
   * Representation of an integrated device (such as a phone or a wearable)
   * that can hold sensors.
   */
  device?: Device;
  /**
   * An end-user visible name for this data source.
   */
  name?: string;
  /**
   * A constant describing the type of this data source. Indicates whether this
   * data source produces raw or derived data.
   */
  type?: | "raw" | "derived";
}

export interface DataType {
  /**
   * A field represents one dimension of a data type.
   */
  field?: DataTypeField[];
  /**
   * Each data type has a unique, namespaced, name. All data types in the
   * com.google namespace are shared as part of the platform.
   */
  name?: string;
}

/**
 * In case of multi-dimensional data (such as an accelerometer with x, y, and z
 * axes) each field represents one dimension. Each data type field has a unique
 * name which identifies it. The field also defines the format of the data (int,
 * float, etc.). This message is only instantiated in code and not used for wire
 * comms or stored in any way.
 */
export interface DataTypeField {
  /**
   * The different supported formats for each field in a data type.
   */
  format?: | "integer" | "floatPoint" | "string" | "map" | "integerList" | "floatList" | "blob";
  /**
   * Defines the name and format of data. Unlike data type names, field names
   * are not namespaced, and only need to be unique within the data type.
*/ name?: string; optional?: boolean; } /** * Representation of an integrated device (such as a phone or a wearable) that * can hold sensors. Each sensor is exposed as a data source. The main purpose * of the device information contained in this class is to identify the hardware * of a particular data source. This can be useful in different ways, including: * - Distinguishing two similar sensors on different devices (the step counter * on two nexus 5 phones, for instance) - Display the source of data to the user * (by using the device make / model) - Treat data differently depending on * sensor type (accelerometers on a watch may give different patterns than those * on a phone) - Build different analysis models for each device/version. */ export interface Device { /** * Manufacturer of the product/hardware. */ manufacturer?: string; /** * End-user visible model name for the device. */ model?: string; /** * A constant representing the type of the device. */ type?: | "unknown" | "phone" | "tablet" | "watch" | "chestStrap" | "scale" | "headMounted" | "smartDisplay"; /** * The serial number or other unique ID for the hardware. This field is * obfuscated when read by any REST or Android client that did not create the * data source. Only the data source creator will see the uid field in clear * and normal form. The obfuscation preserves equality; that is, given two * IDs, if id1 == id2, obfuscated(id1) == obfuscated(id2). */ uid?: string; /** * Version string for the device hardware/software. */ version?: string; } export interface ListDataPointChangesResponse { /** * The data stream ID of the data source with data point changes. */ dataSourceId?: string; /** * Deleted data points for the user. Note, for modifications this should be * parsed before handling insertions. */ deletedDataPoint?: DataPoint[]; /** * Inserted data points for the user. */ insertedDataPoint?: DataPoint[]; /** * The continuation token, which is used to page through large result sets. 
* Provide this value in a subsequent request to return the next page of * results. */ nextPageToken?: string; } function serializeListDataPointChangesResponse(data: any): ListDataPointChangesResponse { return { ...data, deletedDataPoint: data["deletedDataPoint"] !== undefined ? data["deletedDataPoint"].map((item: any) => (serializeDataPoint(item))) : undefined, insertedDataPoint: data["insertedDataPoint"] !== undefined ? data["insertedDataPoint"].map((item: any) => (serializeDataPoint(item))) : undefined, }; } function deserializeListDataPointChangesResponse(data: any): ListDataPointChangesResponse { return { ...data, deletedDataPoint: data["deletedDataPoint"] !== undefined ? data["deletedDataPoint"].map((item: any) => (deserializeDataPoint(item))) : undefined, insertedDataPoint: data["insertedDataPoint"] !== undefined ? data["insertedDataPoint"].map((item: any) => (deserializeDataPoint(item))) : undefined, }; } export interface ListDataSourcesResponse { /** * A previously created data source. */ dataSource?: DataSource[]; } export interface ListSessionsResponse { /** * If includeDeleted is set to true in the request, and startTime and endTime * are omitted, this will include sessions which were deleted since the last * sync. */ deletedSession?: Session[]; /** * Flag to indicate server has more data to transfer. DO NOT USE THIS FIELD. * It is never populated in responses from the server. */ hasMoreData?: boolean; /** * The sync token which is used to sync further changes. This will only be * provided if both startTime and endTime are omitted from the request. */ nextPageToken?: string; /** * Sessions with an end time that is between startTime and endTime of the * request. */ session?: Session[]; } function serializeListSessionsResponse(data: any): ListSessionsResponse { return { ...data, deletedSession: data["deletedSession"] !== undefined ? data["deletedSession"].map((item: any) => (serializeSession(item))) : undefined, session: data["session"] !== undefined ? 
data["session"].map((item: any) => (serializeSession(item))) : undefined, }; } function deserializeListSessionsResponse(data: any): ListSessionsResponse { return { ...data, deletedSession: data["deletedSession"] !== undefined ? data["deletedSession"].map((item: any) => (deserializeSession(item))) : undefined, session: data["session"] !== undefined ? data["session"].map((item: any) => (deserializeSession(item))) : undefined, }; } /** * Holder object for the value of an entry in a map field of a data point. A * map value supports a subset of the formats that the regular Value supports. */ export interface MapValue { /** * Floating point value. */ fpVal?: number; } /** * Sessions contain metadata, such as a user-friendly name and time interval * information. */ export interface Session { /** * Session active time. While start_time_millis and end_time_millis define * the full session time, the active time can be shorter and specified by * active_time_millis. If the inactive time during the session is known, it * should also be inserted via a com.google.activity.segment data point with a * STILL activity value */ activeTimeMillis?: bigint; /** * The type of activity this session represents. */ activityType?: number; /** * The application that created the session. */ application?: Application; /** * A description for this session. */ description?: string; /** * An end time, in milliseconds since epoch, inclusive. */ endTimeMillis?: bigint; /** * A client-generated identifier that is unique across all sessions owned by * this particular user. */ id?: string; /** * A timestamp that indicates when the session was last modified. */ modifiedTimeMillis?: bigint; /** * A human readable name of the session. */ name?: string; /** * A start time, in milliseconds since epoch, inclusive. */ startTimeMillis?: bigint; } function serializeSession(data: any): Session { return { ...data, activeTimeMillis: data["activeTimeMillis"] !== undefined ? 
String(data["activeTimeMillis"]) : undefined, endTimeMillis: data["endTimeMillis"] !== undefined ? String(data["endTimeMillis"]) : undefined, modifiedTimeMillis: data["modifiedTimeMillis"] !== undefined ? String(data["modifiedTimeMillis"]) : undefined, startTimeMillis: data["startTimeMillis"] !== undefined ? String(data["startTimeMillis"]) : undefined, }; } function deserializeSession(data: any): Session { return { ...data, activeTimeMillis: data["activeTimeMillis"] !== undefined ? BigInt(data["activeTimeMillis"]) : undefined, endTimeMillis: data["endTimeMillis"] !== undefined ? BigInt(data["endTimeMillis"]) : undefined, modifiedTimeMillis: data["modifiedTimeMillis"] !== undefined ? BigInt(data["modifiedTimeMillis"]) : undefined, startTimeMillis: data["startTimeMillis"] !== undefined ? BigInt(data["startTimeMillis"]) : undefined, }; } /** * Additional options for Fitness#usersDataSourcesDataPointChangesList. */ export interface UsersDataSourcesDataPointChangesListOptions { /** * If specified, no more than this many data point changes will be included * in the response. */ limit?: number; /** * The continuation token, which is used to page through large result sets. * To get the next page of results, set this parameter to the value of * nextPageToken from the previous response. */ pageToken?: string; } /** * Additional options for Fitness#usersDataSourcesDatasetsGet. */ export interface UsersDataSourcesDatasetsGetOptions { /** * If specified, no more than this many data points will be included in the * dataset. If there are more data points in the dataset, nextPageToken will * be set in the dataset response. The limit is applied from the end of the * time range. That is, if pageToken is absent, the limit most recent data * points will be returned. */ limit?: number; /** * The continuation token, which is used to page through large datasets. To * get the next page of a dataset, set this parameter to the value of * nextPageToken from the previous response. 
Each subsequent call will yield a * partial dataset with data point end timestamps that are strictly smaller * than those in the previous partial response. */ pageToken?: string; } /** * Additional options for Fitness#usersDataSourcesList. */ export interface UsersDataSourcesListOptions { /** * The names of data types to include in the list. If not specified, all data * sources will be returned. */ dataTypeName?: string; } /** * Additional options for Fitness#usersSessionsList. */ export interface UsersSessionsListOptions { /** * If non-empty, only sessions with these activity types should be returned. */ activityType?: number; /** * An RFC3339 timestamp. Only sessions ending between the start and end times * will be included in the response. If this time is omitted but startTime is * specified, all sessions from startTime to the end of time will be returned. */ endTime?: string; /** * If true, and if both startTime and endTime are omitted, session deletions * will be returned. */ includeDeleted?: boolean; /** * The continuation token, which is used for incremental syncing. To get the * next batch of changes, set this parameter to the value of nextPageToken * from the previous response. The page token is ignored if either start or * end time is specified. If none of start time, end time, and the page token * is specified, sessions modified in the last 30 days are returned. */ pageToken?: string; /** * An RFC3339 timestamp. Only sessions ending between the start and end times * will be included in the response. If this time is omitted but endTime is * specified, all sessions from the start of time up to endTime will be * returned. */ startTime?: string; } /** * Holder object for the value of a single field in a data point. A field value * has a particular format and is only ever set to one of an integer or a * floating point value. */ export interface Value { /** * Floating point value. When this is set, other values must not be set. 
*/ fpVal?: number; /** * Integer value. When this is set, other values must not be set. */ intVal?: number; /** * Map value. The valid key space and units for the corresponding value of * each entry should be documented as part of the data type definition. Keys * should be kept small whenever possible. Data streams with large keys and * high data frequency may be down sampled. */ mapVal?: ValueMapValEntry[]; /** * String value. When this is set, other values must not be set. Strings * should be kept small whenever possible. Data streams with large string * values and high data frequency may be down sampled. */ stringVal?: string; } export interface ValueMapValEntry { key?: string; value?: MapValue; }