// node-ejs-renderer/node_modules/openai/resources/beta/vector-stores/files.js
"use strict";
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
// TS "import binding" helper: mirrors property `k` of module `m` onto `o`
// under the name `k2` (defaults to `k`). When Object.create is available it
// installs a live accessor so re-exports track later mutations of `m[k]`;
// otherwise it falls back to a plain value copy.
var __createBinding = (this && this.__createBinding) || (Object.create
    ? function (o, m, k, k2) {
        if (k2 === undefined) k2 = k;
        var desc = Object.getOwnPropertyDescriptor(m, k);
        // Wrap in a getter unless the source already exposes a non-writable,
        // non-configurable accessor on a real ES module namespace.
        var needsWrapper = !desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable);
        if (needsWrapper) {
            desc = { enumerable: true, get: function () { return m[k]; } };
        }
        Object.defineProperty(o, k2, desc);
    }
    : function (o, m, k, k2) {
        if (k2 === undefined) k2 = k;
        o[k2] = m[k];
    });
// TS helper: attaches the whole source module `v` as the `default` property
// of the namespace object `o` (non-writable when defineProperty is used).
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create
    ? function (o, v) {
        Object.defineProperty(o, "default", { enumerable: true, value: v });
    }
    : function (o, v) {
        o["default"] = v;
    });
// TS `import * as ns` helper: real ES modules pass through untouched; for
// CommonJS modules it builds a namespace object with bindings for every own
// non-default key plus the module itself as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) {
        for (var key in mod) {
            if (key !== "default" && Object.prototype.hasOwnProperty.call(mod, key)) {
                __createBinding(result, mod, key);
            }
        }
    }
    __setModuleDefault(result, mod);
    return result;
};
// Mark this module as transpiled ESM and pre-declare the named exports
// (assigned at the bottom of the file) so circular imports see the keys.
Object.defineProperty(exports, "__esModule", { value: true });
exports.VectorStoreFilesPage = exports.Files = void 0;
const resource_1 = require("../../../resource.js");
const core_1 = require("../../../core.js");
const core_2 = require("../../../core.js");
const FilesAPI = __importStar(require("./files.js"));
const pagination_1 = require("../../../pagination.js");
/**
 * API resource for files attached to a vector store (beta Assistants v2 API).
 * All requests send the `OpenAI-Beta: assistants=v2` header.
 */
class Files extends resource_1.APIResource {
    /**
     * Create a vector store file by attaching a
     * [File](https://platform.openai.com/docs/api-reference/files) to a
     * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object).
     */
    create(vectorStoreId, body, options) {
        return this._client.post(`/vector_stores/${vectorStoreId}/files`, {
            body,
            ...options,
            headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
        });
    }
    /**
     * Retrieves a vector store file.
     */
    retrieve(vectorStoreId, fileId, options) {
        return this._client.get(`/vector_stores/${vectorStoreId}/files/${fileId}`, {
            ...options,
            headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
        });
    }
    /**
     * Returns a cursor page of files in the given vector store. `query` may be
     * omitted, in which case a second-argument `options` object is accepted in
     * its place (overload-style call).
     */
    list(vectorStoreId, query = {}, options) {
        // Support `list(id, options)` with no query object.
        if ((0, core_1.isRequestOptions)(query)) {
            return this.list(vectorStoreId, {}, query);
        }
        return this._client.getAPIList(`/vector_stores/${vectorStoreId}/files`, VectorStoreFilesPage, {
            query,
            ...options,
            headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
        });
    }
    /**
     * Delete a vector store file. This will remove the file from the vector store but
     * the file itself will not be deleted. To delete the file, use the
     * [delete file](https://platform.openai.com/docs/api-reference/files/delete)
     * endpoint.
     */
    del(vectorStoreId, fileId, options) {
        return this._client.delete(`/vector_stores/${vectorStoreId}/files/${fileId}`, {
            ...options,
            headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
        });
    }
    /**
     * Attach a file to the given vector store and wait for it to be processed.
     */
    async createAndPoll(vectorStoreId, body, options) {
        const file = await this.create(vectorStoreId, body, options);
        return await this.poll(vectorStoreId, file.id, options);
    }
    /**
     * Wait for the vector store file to finish processing.
     *
     * Note: this will return even if the file failed to process or was
     * cancelled; check file.last_error and file.status to handle these cases.
     */
    async poll(vectorStoreId, fileId, options) {
        const headers = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' };
        if (options?.pollIntervalMs) {
            headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString();
        }
        while (true) {
            const fileResponse = await this.retrieve(vectorStoreId, fileId, {
                ...options,
                headers,
            }).withResponse();
            const file = fileResponse.data;
            switch (file.status) {
                case 'in_progress': {
                    // Poll-interval priority: explicit option > server hint
                    // (`openai-poll-after-ms` header) > 5s default.
                    let sleepInterval = 5000;
                    if (options?.pollIntervalMs) {
                        sleepInterval = options.pollIntervalMs;
                    }
                    else {
                        const headerInterval = fileResponse.response.headers.get('openai-poll-after-ms');
                        if (headerInterval) {
                            const headerIntervalMs = parseInt(headerInterval, 10);
                            if (!isNaN(headerIntervalMs)) {
                                sleepInterval = headerIntervalMs;
                            }
                        }
                    }
                    await (0, core_2.sleep)(sleepInterval);
                    break;
                }
                case 'failed':
                case 'cancelled':
                case 'completed':
                    // Terminal states — return the file regardless of outcome.
                    return file;
                default:
                    // Unknown/unexpected status: back off instead of hot-looping
                    // against the API.
                    await (0, core_2.sleep)(5000);
            }
        }
    }
    /**
     * Upload a file to the `files` API and then attach it to the given vector store.
     *
     * Note the file will be asynchronously processed (you can use the alternative
     * polling helper method to wait for processing to complete).
     */
    async upload(vectorStoreId, file, options) {
        const fileInfo = await this._client.files.create({ file: file, purpose: 'assistants' }, options);
        return this.create(vectorStoreId, { file_id: fileInfo.id }, options);
    }
    /**
     * Add a file to a vector store and poll until processing is complete.
     */
    async uploadAndPoll(vectorStoreId, file, options) {
        const fileInfo = await this.upload(vectorStoreId, file, options);
        return await this.poll(vectorStoreId, fileInfo.id, options);
    }
}
exports.Files = Files;
// Cursor-paginated collection of vector store files returned by `Files.list`.
class VectorStoreFilesPage extends pagination_1.CursorPage {
}
exports.VectorStoreFilesPage = VectorStoreFilesPage;
// TS namespace merge: re-expose the page class as `Files.VectorStoreFilesPage`
// so consumers can reach it through the resource class itself.
(function (Files) {
    Files.VectorStoreFilesPage = FilesAPI.VectorStoreFilesPage;
})(Files = exports.Files || (exports.Files = {}));
//# sourceMappingURL=files.js.map