12265 lines
436 KiB
JavaScript
12265 lines
436 KiB
JavaScript
import { e as getAugmentedNamespace, d as commonjsGlobal, b as React } from './vendor-react.js';
|
|
|
|
// Vite's runtime chunk-preload helper. Before importing a lazy chunk, inject
// <link> tags for its static dependencies (JS via rel="modulepreload", CSS via
// rel="stylesheet") so the browser fetches them in parallel with the import.
// `seen` de-duplicates injections across the lifetime of the page.
const scriptRel = 'modulepreload';const assetsURL = function(dep) { return "/"+dep };const seen = {};const __vitePreload = function preload(baseModule, deps, importerUrl) {
    let promise = Promise.resolve();
    if (true && deps && deps.length > 0) {
        // NOTE(review): return value unused; side effect (if any) unclear — kept as-is.
        document.getElementsByTagName("link");
        const cspNonceMeta = document.querySelector(
            "meta[property=csp-nonce]"
        );
        // Support both the IDL `nonce` property and the raw attribute form.
        const cspNonce = cspNonceMeta?.nonce || cspNonceMeta?.getAttribute("nonce");
        promise = Promise.allSettled(
            deps.map((dep) => {
                dep = assetsURL(dep);
                // Each asset is injected at most once per page.
                if (dep in seen) return;
                seen[dep] = true;
                const isCss = dep.endsWith(".css");
                const cssSelector = isCss ? '[rel="stylesheet"]' : "";
                // Skip assets already present in the document.
                if (document.querySelector(`link[href="${dep}"]${cssSelector}`)) {
                    return;
                }
                const link = document.createElement("link");
                link.rel = isCss ? "stylesheet" : scriptRel;
                if (!isCss) {
                    link.as = "script";
                }
                link.crossOrigin = "";
                link.href = dep;
                if (cspNonce) {
                    link.setAttribute("nonce", cspNonce);
                }
                document.head.appendChild(link);
                // Only CSS is awaited: a missing stylesheet would cause a flash
                // of unstyled content, whereas script preloads are only hints.
                if (isCss) {
                    return new Promise((res, rej) => {
                        link.addEventListener("load", res);
                        link.addEventListener(
                            "error",
                            () => rej(new Error(`Unable to preload CSS for ${dep}`))
                        );
                    });
                }
            })
        );
    }
    // Dispatch a cancelable `vite:preloadError` event carrying the failure;
    // rethrow unless some listener called preventDefault().
    function handlePreloadError(err) {
        const e = new Event("vite:preloadError", {
            cancelable: true
        });
        e.payload = err;
        window.dispatchEvent(e);
        if (!e.defaultPrevented) {
            throw err;
        }
    }
    // Surface any rejected preload, then perform the actual dynamic import.
    return promise.then((res) => {
        for (const item of res || []) {
            if (item.status !== "rejected") continue;
            handlePreloadError(item.reason);
        }
        return baseModule().catch(handlePreloadError);
    });
};
|
|
|
|
// Resolve the fetch implementation to use, in priority order:
// a caller-supplied override, then the global fetch, then a lazily
// imported polyfill for runtimes without a native fetch.
const resolveFetch$3 = (customFetch) => {
    let _fetch;
    if (customFetch) {
        _fetch = customFetch;
    }
    else if (typeof fetch !== 'undefined') {
        _fetch = fetch;
    }
    else {
        _fetch = (...args) => __vitePreload(async () => { const {default: fetch} = await Promise.resolve().then(() => browser);return { default: fetch }},true?void 0:void 0).then(({ default: fetch }) => fetch(...args));
    }
    // Wrap in a fresh arrow so the implementation is never invoked with a
    // bound `this` (avoids "illegal invocation" on window.fetch).
    return (...args) => _fetch(...args);
};
|
|
|
|
/**
 * Base error type for all Edge Function failures.
 * Carries an optional `context` (the Response or underlying error).
 */
class FunctionsError extends Error {
    /**
     * @param message - human-readable description of the failure
     * @param name - concrete error name (defaults to 'FunctionsError')
     * @param context - response or cause associated with the failure
     */
    constructor(message, name = 'FunctionsError', context) {
        super(message);
        this.context = context;
        this.name = name;
    }
}
|
|
/** Raised when the HTTP request to the Edge Function could not be sent at all. */
class FunctionsFetchError extends FunctionsError {
    /** @param context - the underlying fetch failure */
    constructor(context) {
        const message = 'Failed to send a request to the Edge Function';
        super(message, 'FunctionsFetchError', context);
    }
}
|
|
/** Raised when the Supabase relay reported a failure (x-relay-error header). */
class FunctionsRelayError extends FunctionsError {
    /** @param context - the Response that carried the relay error */
    constructor(context) {
        const message = 'Relay Error invoking the Edge Function';
        super(message, 'FunctionsRelayError', context);
    }
}
|
|
/** Raised when the Edge Function responded with a non-2xx HTTP status. */
class FunctionsHttpError extends FunctionsError {
    /** @param context - the non-2xx Response */
    constructor(context) {
        const message = 'Edge Function returned a non-2xx status code';
        super(message, 'FunctionsHttpError', context);
    }
}
|
|
// Define the enum for the 'region' property: the set of deployment regions
// an Edge Function invocation can be pinned to ('any' means no pinning).
var FunctionRegion;
(function (FunctionRegion) {
    const regions = {
        Any: "any",
        ApNortheast1: "ap-northeast-1",
        ApNortheast2: "ap-northeast-2",
        ApSouth1: "ap-south-1",
        ApSoutheast1: "ap-southeast-1",
        ApSoutheast2: "ap-southeast-2",
        CaCentral1: "ca-central-1",
        EuCentral1: "eu-central-1",
        EuWest1: "eu-west-1",
        EuWest2: "eu-west-2",
        EuWest3: "eu-west-3",
        SaEast1: "sa-east-1",
        UsEast1: "us-east-1",
        UsWest1: "us-west-1",
        UsWest2: "us-west-2",
    };
    // Populate the enum object exactly as the TypeScript-emitted assignments would.
    for (const [key, value] of Object.entries(regions)) {
        FunctionRegion[key] = value;
    }
})(FunctionRegion || (FunctionRegion = {}));
|
|
|
|
// TypeScript's downleveled async/await helper (__awaiter): drives a generator
// function, adopting each yielded value into a promise of constructor P and
// resuming the generator with the settled value until it completes.
var __awaiter$7 = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-promise yields so `.then` is always available.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with a fulfilled value; reject on synchronous throw.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        // Throw a rejection back into the generator so try/catch inside it works.
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Pump until done, resolving the outer promise with the return value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
/**
 * Client for invoking Supabase Edge Functions over HTTP.
 *
 * Holds a base URL, default headers, a default region, and a resolved fetch
 * implementation; `invoke()` performs one function call and never throws —
 * failures are returned as `{ data: null, error, response? }`.
 */
class FunctionsClient {
    /**
     * @param url - base URL of the functions endpoint
     * @param options.headers - default headers sent with every invocation
     * @param options.customFetch - optional fetch override
     * @param options.region - default region ('any' disables region pinning)
     */
    constructor(url, { headers = {}, customFetch, region = FunctionRegion.Any, } = {}) {
        this.url = url;
        this.headers = headers;
        this.region = region;
        this.fetch = resolveFetch$3(customFetch);
    }
    /**
     * Updates the authorization header
     * @param token - the new jwt token sent in the authorisation header
     */
    setAuth(token) {
        this.headers.Authorization = `Bearer ${token}`;
    }
    /**
     * Invokes a function
     * @param functionName - The name of the Function to invoke.
     * @param options - Options for invoking the Function.
     */
    invoke(functionName, options = {}) {
        var _a;
        return __awaiter$7(this, void 0, void 0, function* () {
            try {
                const { headers, method, body: functionArgs } = options;
                let _headers = {};
                // Per-call region overrides the client-level default.
                let { region } = options;
                if (!region) {
                    region = this.region;
                }
                // Add region as query parameter using URL API
                const url = new URL(`${this.url}/${functionName}`);
                if (region && region !== 'any') {
                    _headers['x-region'] = region;
                    url.searchParams.set('forceFunctionRegion', region);
                }
                let body;
                // Infer a Content-Type from the body only when the caller
                // did not set one explicitly.
                if (functionArgs &&
                    ((headers && !Object.prototype.hasOwnProperty.call(headers, 'Content-Type')) || !headers)) {
                    if ((typeof Blob !== 'undefined' && functionArgs instanceof Blob) ||
                        functionArgs instanceof ArrayBuffer) {
                        // will work for File as File inherits Blob
                        // also works for ArrayBuffer as it is the same underlying structure as a Blob
                        _headers['Content-Type'] = 'application/octet-stream';
                        body = functionArgs;
                    }
                    else if (typeof functionArgs === 'string') {
                        // plain string
                        _headers['Content-Type'] = 'text/plain';
                        body = functionArgs;
                    }
                    else if (typeof FormData !== 'undefined' && functionArgs instanceof FormData) {
                        // don't set content-type headers
                        // Request will automatically add the right boundary value
                        body = functionArgs;
                    }
                    else {
                        // default, assume this is JSON
                        _headers['Content-Type'] = 'application/json';
                        body = JSON.stringify(functionArgs);
                    }
                }
                const response = yield this.fetch(url.toString(), {
                    method: method || 'POST',
                    // headers priority is (high to low):
                    // 1. invoke-level headers
                    // 2. client-level headers
                    // 3. default Content-Type header
                    headers: Object.assign(Object.assign(Object.assign({}, _headers), this.headers), headers),
                    body,
                }).catch((fetchError) => {
                    throw new FunctionsFetchError(fetchError);
                });
                // The relay signals function-level failures via this header.
                const isRelayError = response.headers.get('x-relay-error');
                if (isRelayError && isRelayError === 'true') {
                    throw new FunctionsRelayError(response);
                }
                if (!response.ok) {
                    throw new FunctionsHttpError(response);
                }
                // Decode the body according to the response Content-Type
                // (parameters after ';' are ignored).
                let responseType = ((_a = response.headers.get('Content-Type')) !== null && _a !== void 0 ? _a : 'text/plain').split(';')[0].trim();
                let data;
                if (responseType === 'application/json') {
                    data = yield response.json();
                }
                else if (responseType === 'application/octet-stream') {
                    data = yield response.blob();
                }
                else if (responseType === 'text/event-stream') {
                    // Event streams are handed back raw so the caller can consume them.
                    data = response;
                }
                else if (responseType === 'multipart/form-data') {
                    data = yield response.formData();
                }
                else {
                    // default to text
                    data = yield response.text();
                }
                return { data, error: null, response };
            }
            catch (error) {
                // Errors are returned, not thrown; HTTP/relay errors keep the
                // original Response attached (as `context`) for inspection.
                return {
                    data: null,
                    error,
                    response: error instanceof FunctionsHttpError || error instanceof FunctionsRelayError
                        ? error.context
                        : undefined,
                };
            }
        });
    }
}
|
|
|
|
// Module namespace placeholders for the bundled CommonJS build of
// @supabase/postgrest-js; each is presumably populated further down in the
// bundle via defineProperty/.default assignments (only some are visible here).
var cjs = {};

var PostgrestClient$2 = {};

var PostgrestQueryBuilder$2 = {};

var PostgrestFilterBuilder$2 = {};

var PostgrestTransformBuilder$2 = {};

var PostgrestBuilder$2 = {};
|
|
|
|
// ref: https://github.com/tc39/proposal-global
var getGlobal = function() {
    // `Function('return this')()` would be the one reliable way to reach the
    // global object, but it causes CSP violations in Chrome apps, so probe
    // the well-known host aliases instead (worker, browser, then Node).
    if (typeof self !== 'undefined') return self;
    if (typeof window !== 'undefined') return window;
    if (typeof global !== 'undefined') return global;
    throw new Error('unable to locate global object');
};
|
|
|
|
var globalObject = getGlobal();

// Native fetch captured from the host environment.
// NOTE(review): if the host has no global fetch, the `.bind` below throws at
// module load — presumably this bundle targets fetch-capable runtimes; verify.
const fetch$1 = globalObject.fetch;

// Bound copy so it can be called without a receiver (avoids "illegal invocation").
const nodeFetch = globalObject.fetch.bind(globalObject);

const Headers$1 = globalObject.Headers;

const Request$1 = globalObject.Request;

const Response$1 = globalObject.Response;

// Frozen module-like namespace exposing the host fetch primitives; appears to
// stand in for a fetch polyfill package's browser entry point.
const browser = /*#__PURE__*/Object.freeze(/*#__PURE__*/Object.defineProperty({
    __proto__: null,
    Headers: Headers$1,
    Request: Request$1,
    Response: Response$1,
    default: nodeFetch,
    fetch: fetch$1
}, Symbol.toStringTag, { value: 'Module' }));

// CommonJS-compatible view of the namespace, consumed by the bundled postgrest code.
const require$$0 = /*@__PURE__*/getAugmentedNamespace(browser);
|
|
|
|
var PostgrestError$2 = {};

Object.defineProperty(PostgrestError$2, "__esModule", { value: true });
/**
 * Error format
 *
 * {@link https://postgrest.org/en/stable/api.html?highlight=options#errors-and-http-status-codes}
 */
let PostgrestError$1 = class PostgrestError extends Error {
    /** @param context - PostgREST error payload ({ message, details, hint, code }) */
    constructor(context) {
        super(context.message);
        this.name = 'PostgrestError';
        // Carry the PostgREST payload fields through verbatim.
        const { details, hint, code } = context;
        this.details = details;
        this.hint = hint;
        this.code = code;
    }
};
PostgrestError$2.default = PostgrestError$1;
|
|
|
|
var __importDefault$5 = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) {
    // CommonJS interop: genuine ES modules pass through untouched; anything
    // else is wrapped so consumers can uniformly read `.default`.
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
|
|
Object.defineProperty(PostgrestBuilder$2, "__esModule", { value: true });
// @ts-ignore
const node_fetch_1 = __importDefault$5(require$$0);
const PostgrestError_1$1 = __importDefault$5(PostgrestError$2);
/**
 * Thenable builder holding one PostgREST request. Nothing is sent until the
 * builder is awaited: `then()` finalizes headers, performs the fetch, and
 * shapes the result into `{ error, data, count, status, statusText }`.
 */
let PostgrestBuilder$1 = class PostgrestBuilder {
    constructor(builder) {
        var _a, _b;
        this.shouldThrowOnError = false;
        this.method = builder.method;
        this.url = builder.url;
        // Copy into a fresh Headers so later mutation doesn't leak to the caller.
        this.headers = new Headers(builder.headers);
        this.schema = builder.schema;
        this.body = builder.body;
        this.shouldThrowOnError = (_a = builder.shouldThrowOnError) !== null && _a !== void 0 ? _a : false;
        this.signal = builder.signal;
        this.isMaybeSingle = (_b = builder.isMaybeSingle) !== null && _b !== void 0 ? _b : false;
        // fetch resolution: explicit override, then the bundled polyfill when no
        // global fetch exists, then the native global fetch.
        if (builder.fetch) {
            this.fetch = builder.fetch;
        }
        else if (typeof fetch === 'undefined') {
            this.fetch = node_fetch_1.default;
        }
        else {
            this.fetch = fetch;
        }
    }
    /**
     * If there's an error with the query, throwOnError will reject the promise by
     * throwing the error instead of returning it as part of a successful response.
     *
     * {@link https://github.com/supabase/supabase-js/issues/92}
     */
    throwOnError() {
        this.shouldThrowOnError = true;
        return this;
    }
    /**
     * Set an HTTP header for the request.
     */
    setHeader(name, value) {
        // Re-wrap first so a Headers object shared with a parent builder
        // is never mutated in place.
        this.headers = new Headers(this.headers);
        this.headers.set(name, value);
        return this;
    }
    /**
     * Execute the request and settle with a PostgrestResponse-shaped object.
     * Called implicitly by `await`/`.then()` on the builder.
     */
    then(onfulfilled, onrejected) {
        // https://postgrest.org/en/stable/api.html#switching-schemas
        if (this.schema === undefined) ; // no schema header needed
        else if (['GET', 'HEAD'].includes(this.method)) {
            this.headers.set('Accept-Profile', this.schema);
        }
        else {
            this.headers.set('Content-Profile', this.schema);
        }
        if (this.method !== 'GET' && this.method !== 'HEAD') {
            this.headers.set('Content-Type', 'application/json');
        }
        // NOTE: Invoke w/o `this` to avoid illegal invocation error.
        // https://github.com/supabase/postgrest-js/pull/247
        const _fetch = this.fetch;
        let res = _fetch(this.url.toString(), {
            method: this.method,
            headers: this.headers,
            body: JSON.stringify(this.body),
            signal: this.signal,
        }).then(async (res) => {
            var _a, _b, _c, _d;
            let error = null;
            let data = null;
            let count = null;
            let status = res.status;
            let statusText = res.statusText;
            if (res.ok) {
                if (this.method !== 'HEAD') {
                    const body = await res.text();
                    // Empty body: data stays null.
                    if (body === '') ;
                    else if (this.headers.get('Accept') === 'text/csv') {
                        data = body;
                    }
                    else if (this.headers.get('Accept') &&
                        ((_a = this.headers.get('Accept')) === null || _a === void 0 ? void 0 : _a.includes('application/vnd.pgrst.plan+text'))) {
                        // EXPLAIN text output is returned verbatim.
                        data = body;
                    }
                    else {
                        data = JSON.parse(body);
                    }
                }
                // Row count is only extracted when the caller asked for one via
                // a Prefer: count=... header; it lives in content-range's "/total".
                const countHeader = (_b = this.headers.get('Prefer')) === null || _b === void 0 ? void 0 : _b.match(/count=(exact|planned|estimated)/);
                const contentRange = (_c = res.headers.get('content-range')) === null || _c === void 0 ? void 0 : _c.split('/');
                if (countHeader && contentRange && contentRange.length > 1) {
                    count = parseInt(contentRange[1]);
                }
                // Temporary partial fix for https://github.com/supabase/postgrest-js/issues/361
                // Issue persists e.g. for `.insert([...]).select().maybeSingle()`
                if (this.isMaybeSingle && this.method === 'GET' && Array.isArray(data)) {
                    if (data.length > 1) {
                        error = {
                            // https://github.com/PostgREST/postgrest/blob/a867d79c42419af16c18c3fb019eba8df992626f/src/PostgREST/Error.hs#L553
                            code: 'PGRST116',
                            details: `Results contain ${data.length} rows, application/vnd.pgrst.object+json requires 1 row`,
                            hint: null,
                            message: 'JSON object requested, multiple (or no) rows returned',
                        };
                        data = null;
                        count = null;
                        status = 406;
                        statusText = 'Not Acceptable';
                    }
                    else if (data.length === 1) {
                        data = data[0];
                    }
                    else {
                        data = null;
                    }
                }
            }
            else {
                const body = await res.text();
                try {
                    error = JSON.parse(body);
                    // Workaround for https://github.com/supabase/postgrest-js/issues/295
                    if (Array.isArray(error) && res.status === 404) {
                        data = [];
                        error = null;
                        status = 200;
                        statusText = 'OK';
                    }
                }
                catch (_e) {
                    // Workaround for https://github.com/supabase/postgrest-js/issues/295
                    if (res.status === 404 && body === '') {
                        status = 204;
                        statusText = 'No Content';
                    }
                    else {
                        // Non-JSON error body: surface the raw text as the message.
                        error = {
                            message: body,
                        };
                    }
                }
                // maybeSingle treats "0 rows" as a successful null result.
                if (error && this.isMaybeSingle && ((_d = error === null || error === void 0 ? void 0 : error.details) === null || _d === void 0 ? void 0 : _d.includes('0 rows'))) {
                    error = null;
                    status = 200;
                    statusText = 'OK';
                }
                if (error && this.shouldThrowOnError) {
                    throw new PostgrestError_1$1.default(error);
                }
            }
            const postgrestResponse = {
                error,
                data,
                count,
                status,
                statusText,
            };
            return postgrestResponse;
        });
        // Network-level failures become a structured error response (status 0)
        // unless the caller opted into throwing.
        if (!this.shouldThrowOnError) {
            res = res.catch((fetchError) => {
                var _a, _b, _c;
                return ({
                    error: {
                        message: `${(_a = fetchError === null || fetchError === void 0 ? void 0 : fetchError.name) !== null && _a !== void 0 ? _a : 'FetchError'}: ${fetchError === null || fetchError === void 0 ? void 0 : fetchError.message}`,
                        details: `${(_b = fetchError === null || fetchError === void 0 ? void 0 : fetchError.stack) !== null && _b !== void 0 ? _b : ''}`,
                        hint: '',
                        code: `${(_c = fetchError === null || fetchError === void 0 ? void 0 : fetchError.code) !== null && _c !== void 0 ? _c : ''}`,
                    },
                    data: null,
                    count: null,
                    status: 0,
                    statusText: '',
                });
            });
        }
        return res.then(onfulfilled, onrejected);
    }
    /**
     * Override the type of the returned `data`.
     *
     * @typeParam NewResult - The new result type to override with
     * @deprecated Use overrideTypes<yourType, { merge: false }>() method at the end of your call chain instead
     */
    returns() {
        /* istanbul ignore next */
        return this;
    }
    /**
     * Override the type of the returned `data` field in the response.
     *
     * @typeParam NewResult - The new type to cast the response data to
     * @typeParam Options - Optional type configuration (defaults to { merge: true })
     * @typeParam Options.merge - When true, merges the new type with existing return type. When false, replaces the existing types entirely (defaults to true)
     * @example
     * ```typescript
     * // Merge with existing types (default behavior)
     * const query = supabase
     *   .from('users')
     *   .select()
     *   .overrideTypes<{ custom_field: string }>()
     *
     * // Replace existing types completely
     * const replaceQuery = supabase
     *   .from('users')
     *   .select()
     *   .overrideTypes<{ id: number; name: string }, { merge: false }>()
     * ```
     * @returns A PostgrestBuilder instance with the new type
     */
    overrideTypes() {
        // Type-level only; no runtime effect.
        return this;
    }
};
PostgrestBuilder$2.default = PostgrestBuilder$1;
|
|
|
|
var __importDefault$4 = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) {
    // CommonJS interop helper: wrap plain exports under a `default` key.
    return !(mod && mod.__esModule) ? { "default": mod } : mod;
};
|
|
Object.defineProperty(PostgrestTransformBuilder$2, "__esModule", { value: true });
const PostgrestBuilder_1$1 = __importDefault$4(PostgrestBuilder$2);
/**
 * Builder layer adding PostgREST "transform" operations (select, order,
 * limit, range, single/maybeSingle, output formats, explain). Each method
 * mutates the request URL's search params or headers and returns `this`
 * for chaining.
 */
let PostgrestTransformBuilder$1 = class PostgrestTransformBuilder extends PostgrestBuilder_1$1.default {
    /**
     * Perform a SELECT on the query result.
     *
     * By default, `.insert()`, `.update()`, `.upsert()`, and `.delete()` do not
     * return modified rows. By calling this method, modified rows are returned in
     * `data`.
     *
     * @param columns - The columns to retrieve, separated by commas
     */
    select(columns) {
        // Remove whitespaces except when quoted
        let quoted = false;
        const cleanedColumns = (columns !== null && columns !== void 0 ? columns : '*')
            .split('')
            .map((c) => {
            // Strip whitespace outside double quotes; the `quoted` flag toggles
            // on each '"' so quoted identifiers keep their spaces.
            if (/\s/.test(c) && !quoted) {
                return '';
            }
            if (c === '"') {
                quoted = !quoted;
            }
            return c;
        })
            .join('');
        this.url.searchParams.set('select', cleanedColumns);
        // Ask PostgREST to return the affected rows for mutating requests.
        this.headers.append('Prefer', 'return=representation');
        return this;
    }
    /**
     * Order the query result by `column`.
     *
     * You can call this method multiple times to order by multiple columns.
     *
     * You can order referenced tables, but it only affects the ordering of the
     * parent table if you use `!inner` in the query.
     *
     * @param column - The column to order by
     * @param options - Named parameters
     * @param options.ascending - If `true`, the result will be in ascending order
     * @param options.nullsFirst - If `true`, `null`s appear first. If `false`,
     * `null`s appear last.
     * @param options.referencedTable - Set this to order a referenced table by
     * its columns
     * @param options.foreignTable - Deprecated, use `options.referencedTable`
     * instead
     */
    order(column, { ascending = true, nullsFirst, foreignTable, referencedTable = foreignTable, } = {}) {
        const key = referencedTable ? `${referencedTable}.order` : 'order';
        // Append to any existing order clause rather than replacing it.
        const existingOrder = this.url.searchParams.get(key);
        this.url.searchParams.set(key, `${existingOrder ? `${existingOrder},` : ''}${column}.${ascending ? 'asc' : 'desc'}${nullsFirst === undefined ? '' : nullsFirst ? '.nullsfirst' : '.nullslast'}`);
        return this;
    }
    /**
     * Limit the query result by `count`.
     *
     * @param count - The maximum number of rows to return
     * @param options - Named parameters
     * @param options.referencedTable - Set this to limit rows of referenced
     * tables instead of the parent table
     * @param options.foreignTable - Deprecated, use `options.referencedTable`
     * instead
     */
    limit(count, { foreignTable, referencedTable = foreignTable, } = {}) {
        const key = typeof referencedTable === 'undefined' ? 'limit' : `${referencedTable}.limit`;
        this.url.searchParams.set(key, `${count}`);
        return this;
    }
    /**
     * Limit the query result by starting at an offset `from` and ending at the offset `to`.
     * Only records within this range are returned.
     * This respects the query order and if there is no order clause the range could behave unexpectedly.
     * The `from` and `to` values are 0-based and inclusive: `range(1, 3)` will include the second, third
     * and fourth rows of the query.
     *
     * @param from - The starting index from which to limit the result
     * @param to - The last index to which to limit the result
     * @param options - Named parameters
     * @param options.referencedTable - Set this to limit rows of referenced
     * tables instead of the parent table
     * @param options.foreignTable - Deprecated, use `options.referencedTable`
     * instead
     */
    range(from, to, { foreignTable, referencedTable = foreignTable, } = {}) {
        const keyOffset = typeof referencedTable === 'undefined' ? 'offset' : `${referencedTable}.offset`;
        const keyLimit = typeof referencedTable === 'undefined' ? 'limit' : `${referencedTable}.limit`;
        this.url.searchParams.set(keyOffset, `${from}`);
        // Range is inclusive, so add 1
        this.url.searchParams.set(keyLimit, `${to - from + 1}`);
        return this;
    }
    /**
     * Set the AbortSignal for the fetch request.
     *
     * @param signal - The AbortSignal to use for the fetch request
     */
    abortSignal(signal) {
        this.signal = signal;
        return this;
    }
    /**
     * Return `data` as a single object instead of an array of objects.
     *
     * Query result must be one row (e.g. using `.limit(1)`), otherwise this
     * returns an error.
     */
    single() {
        this.headers.set('Accept', 'application/vnd.pgrst.object+json');
        return this;
    }
    /**
     * Return `data` as a single object instead of an array of objects.
     *
     * Query result must be zero or one row (e.g. using `.limit(1)`), otherwise
     * this returns an error.
     */
    maybeSingle() {
        // Temporary partial fix for https://github.com/supabase/postgrest-js/issues/361
        // Issue persists e.g. for `.insert([...]).select().maybeSingle()`
        if (this.method === 'GET') {
            this.headers.set('Accept', 'application/json');
        }
        else {
            this.headers.set('Accept', 'application/vnd.pgrst.object+json');
        }
        // PostgrestBuilder.then() unwraps the single row client-side for GET.
        this.isMaybeSingle = true;
        return this;
    }
    /**
     * Return `data` as a string in CSV format.
     */
    csv() {
        this.headers.set('Accept', 'text/csv');
        return this;
    }
    /**
     * Return `data` as an object in [GeoJSON](https://geojson.org) format.
     */
    geojson() {
        this.headers.set('Accept', 'application/geo+json');
        return this;
    }
    /**
     * Return `data` as the EXPLAIN plan for the query.
     *
     * You need to enable the
     * [db_plan_enabled](https://supabase.com/docs/guides/database/debugging-performance#enabling-explain)
     * setting before using this method.
     *
     * @param options - Named parameters
     *
     * @param options.analyze - If `true`, the query will be executed and the
     * actual run time will be returned
     *
     * @param options.verbose - If `true`, the query identifier will be returned
     * and `data` will include the output columns of the query
     *
     * @param options.settings - If `true`, include information on configuration
     * parameters that affect query planning
     *
     * @param options.buffers - If `true`, include information on buffer usage
     *
     * @param options.wal - If `true`, include information on WAL record generation
     *
     * @param options.format - The format of the output, can be `"text"` (default)
     * or `"json"`
     */
    explain({ analyze = false, verbose = false, settings = false, buffers = false, wal = false, format = 'text', } = {}) {
        var _a;
        const options = [
            analyze ? 'analyze' : null,
            verbose ? 'verbose' : null,
            settings ? 'settings' : null,
            buffers ? 'buffers' : null,
            wal ? 'wal' : null,
        ]
            .filter(Boolean)
            .join('|');
        // An Accept header can carry multiple media types but postgrest-js always sends one
        const forMediatype = (_a = this.headers.get('Accept')) !== null && _a !== void 0 ? _a : 'application/json';
        this.headers.set('Accept', `application/vnd.pgrst.plan+${format}; for="${forMediatype}"; options=${options};`);
        // Both branches return `this`; the split presumably exists only for
        // type narrowing in the TypeScript source.
        if (format === 'json') {
            return this;
        }
        else {
            return this;
        }
    }
    /**
     * Rollback the query.
     *
     * `data` will still be returned, but the query is not committed.
     */
    rollback() {
        this.headers.append('Prefer', 'tx=rollback');
        return this;
    }
    /**
     * Override the type of the returned `data`.
     *
     * @typeParam NewResult - The new result type to override with
     * @deprecated Use overrideTypes<yourType, { merge: false }>() method at the end of your call chain instead
     */
    returns() {
        return this;
    }
    /**
     * Set the maximum number of rows that can be affected by the query.
     * Only available in PostgREST v13+ and only works with PATCH and DELETE methods.
     *
     * @param value - The maximum number of rows that can be affected
     */
    maxAffected(value) {
        // Two Prefer values: strict handling makes PostgREST error (rather than
        // silently truncate) when the limit would be exceeded.
        this.headers.append('Prefer', 'handling=strict');
        this.headers.append('Prefer', `max-affected=${value}`);
        return this;
    }
};
PostgrestTransformBuilder$2.default = PostgrestTransformBuilder$1;
|
|
|
|
// CommonJS interop helper: wrap non-ESM exports under `default`.
var __importDefault$3 = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(PostgrestFilterBuilder$2, "__esModule", { value: true });
const PostgrestTransformBuilder_1$1 = __importDefault$3(PostgrestTransformBuilder$2);
|
|
let PostgrestFilterBuilder$1 = class PostgrestFilterBuilder extends PostgrestTransformBuilder_1$1.default {
|
|
/**
|
|
* Match only rows where `column` is equal to `value`.
|
|
*
|
|
* To check if the value of `column` is NULL, you should use `.is()` instead.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param value - The value to filter with
|
|
*/
|
|
eq(column, value) {
|
|
this.url.searchParams.append(column, `eq.${value}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where `column` is not equal to `value`.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param value - The value to filter with
|
|
*/
|
|
neq(column, value) {
|
|
this.url.searchParams.append(column, `neq.${value}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where `column` is greater than `value`.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param value - The value to filter with
|
|
*/
|
|
gt(column, value) {
|
|
this.url.searchParams.append(column, `gt.${value}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where `column` is greater than or equal to `value`.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param value - The value to filter with
|
|
*/
|
|
gte(column, value) {
|
|
this.url.searchParams.append(column, `gte.${value}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where `column` is less than `value`.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param value - The value to filter with
|
|
*/
|
|
lt(column, value) {
|
|
this.url.searchParams.append(column, `lt.${value}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where `column` is less than or equal to `value`.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param value - The value to filter with
|
|
*/
|
|
lte(column, value) {
|
|
this.url.searchParams.append(column, `lte.${value}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where `column` matches `pattern` case-sensitively.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param pattern - The pattern to match with
|
|
*/
|
|
like(column, pattern) {
|
|
this.url.searchParams.append(column, `like.${pattern}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where `column` matches all of `patterns` case-sensitively.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param patterns - The patterns to match with
|
|
*/
|
|
likeAllOf(column, patterns) {
|
|
this.url.searchParams.append(column, `like(all).{${patterns.join(',')}}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where `column` matches any of `patterns` case-sensitively.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param patterns - The patterns to match with
|
|
*/
|
|
likeAnyOf(column, patterns) {
|
|
this.url.searchParams.append(column, `like(any).{${patterns.join(',')}}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where `column` matches `pattern` case-insensitively.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param pattern - The pattern to match with
|
|
*/
|
|
ilike(column, pattern) {
|
|
this.url.searchParams.append(column, `ilike.${pattern}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where `column` matches all of `patterns` case-insensitively.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param patterns - The patterns to match with
|
|
*/
|
|
ilikeAllOf(column, patterns) {
|
|
this.url.searchParams.append(column, `ilike(all).{${patterns.join(',')}}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where `column` matches any of `patterns` case-insensitively.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param patterns - The patterns to match with
|
|
*/
|
|
ilikeAnyOf(column, patterns) {
|
|
this.url.searchParams.append(column, `ilike(any).{${patterns.join(',')}}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where `column` IS `value`.
|
|
*
|
|
* For non-boolean columns, this is only relevant for checking if the value of
|
|
* `column` is NULL by setting `value` to `null`.
|
|
*
|
|
* For boolean columns, you can also set `value` to `true` or `false` and it
|
|
* will behave the same way as `.eq()`.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param value - The value to filter with
|
|
*/
|
|
is(column, value) {
|
|
this.url.searchParams.append(column, `is.${value}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where `column` is included in the `values` array.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param values - The values array to filter with
|
|
*/
|
|
in(column, values) {
|
|
const cleanedValues = Array.from(new Set(values))
|
|
.map((s) => {
|
|
// handle postgrest reserved characters
|
|
// https://postgrest.org/en/v7.0.0/api.html#reserved-characters
|
|
if (typeof s === 'string' && new RegExp('[,()]').test(s))
|
|
return `"${s}"`;
|
|
else
|
|
return `${s}`;
|
|
})
|
|
.join(',');
|
|
this.url.searchParams.append(column, `in.(${cleanedValues})`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Only relevant for jsonb, array, and range columns. Match only rows where
|
|
* `column` contains every element appearing in `value`.
|
|
*
|
|
* @param column - The jsonb, array, or range column to filter on
|
|
* @param value - The jsonb, array, or range value to filter with
|
|
*/
|
|
contains(column, value) {
|
|
if (typeof value === 'string') {
|
|
// range types can be inclusive '[', ']' or exclusive '(', ')' so just
|
|
// keep it simple and accept a string
|
|
this.url.searchParams.append(column, `cs.${value}`);
|
|
}
|
|
else if (Array.isArray(value)) {
|
|
// array
|
|
this.url.searchParams.append(column, `cs.{${value.join(',')}}`);
|
|
}
|
|
else {
|
|
// json
|
|
this.url.searchParams.append(column, `cs.${JSON.stringify(value)}`);
|
|
}
|
|
return this;
|
|
}
|
|
/**
|
|
* Only relevant for jsonb, array, and range columns. Match only rows where
|
|
* every element appearing in `column` is contained by `value`.
|
|
*
|
|
* @param column - The jsonb, array, or range column to filter on
|
|
* @param value - The jsonb, array, or range value to filter with
|
|
*/
|
|
containedBy(column, value) {
|
|
if (typeof value === 'string') {
|
|
// range
|
|
this.url.searchParams.append(column, `cd.${value}`);
|
|
}
|
|
else if (Array.isArray(value)) {
|
|
// array
|
|
this.url.searchParams.append(column, `cd.{${value.join(',')}}`);
|
|
}
|
|
else {
|
|
// json
|
|
this.url.searchParams.append(column, `cd.${JSON.stringify(value)}`);
|
|
}
|
|
return this;
|
|
}
|
|
/**
|
|
* Only relevant for range columns. Match only rows where every element in
|
|
* `column` is greater than any element in `range`.
|
|
*
|
|
* @param column - The range column to filter on
|
|
* @param range - The range to filter with
|
|
*/
|
|
rangeGt(column, range) {
|
|
this.url.searchParams.append(column, `sr.${range}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Only relevant for range columns. Match only rows where every element in
|
|
* `column` is either contained in `range` or greater than any element in
|
|
* `range`.
|
|
*
|
|
* @param column - The range column to filter on
|
|
* @param range - The range to filter with
|
|
*/
|
|
rangeGte(column, range) {
|
|
this.url.searchParams.append(column, `nxl.${range}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Only relevant for range columns. Match only rows where every element in
|
|
* `column` is less than any element in `range`.
|
|
*
|
|
* @param column - The range column to filter on
|
|
* @param range - The range to filter with
|
|
*/
|
|
rangeLt(column, range) {
|
|
this.url.searchParams.append(column, `sl.${range}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Only relevant for range columns. Match only rows where every element in
|
|
* `column` is either contained in `range` or less than any element in
|
|
* `range`.
|
|
*
|
|
* @param column - The range column to filter on
|
|
* @param range - The range to filter with
|
|
*/
|
|
rangeLte(column, range) {
|
|
this.url.searchParams.append(column, `nxr.${range}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Only relevant for range columns. Match only rows where `column` is
|
|
* mutually exclusive to `range` and there can be no element between the two
|
|
* ranges.
|
|
*
|
|
* @param column - The range column to filter on
|
|
* @param range - The range to filter with
|
|
*/
|
|
rangeAdjacent(column, range) {
|
|
this.url.searchParams.append(column, `adj.${range}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Only relevant for array and range columns. Match only rows where
|
|
* `column` and `value` have an element in common.
|
|
*
|
|
* @param column - The array or range column to filter on
|
|
* @param value - The array or range value to filter with
|
|
*/
|
|
overlaps(column, value) {
|
|
if (typeof value === 'string') {
|
|
// range
|
|
this.url.searchParams.append(column, `ov.${value}`);
|
|
}
|
|
else {
|
|
// array
|
|
this.url.searchParams.append(column, `ov.{${value.join(',')}}`);
|
|
}
|
|
return this;
|
|
}
|
|
/**
|
|
* Only relevant for text and tsvector columns. Match only rows where
|
|
* `column` matches the query string in `query`.
|
|
*
|
|
* @param column - The text or tsvector column to filter on
|
|
* @param query - The query text to match with
|
|
* @param options - Named parameters
|
|
* @param options.config - The text search configuration to use
|
|
* @param options.type - Change how the `query` text is interpreted
|
|
*/
|
|
textSearch(column, query, { config, type } = {}) {
|
|
let typePart = '';
|
|
if (type === 'plain') {
|
|
typePart = 'pl';
|
|
}
|
|
else if (type === 'phrase') {
|
|
typePart = 'ph';
|
|
}
|
|
else if (type === 'websearch') {
|
|
typePart = 'w';
|
|
}
|
|
const configPart = config === undefined ? '' : `(${config})`;
|
|
this.url.searchParams.append(column, `${typePart}fts${configPart}.${query}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows where each column in `query` keys is equal to its
|
|
* associated value. Shorthand for multiple `.eq()`s.
|
|
*
|
|
* @param query - The object to filter with, with column names as keys mapped
|
|
* to their filter values
|
|
*/
|
|
match(query) {
|
|
Object.entries(query).forEach(([column, value]) => {
|
|
this.url.searchParams.append(column, `eq.${value}`);
|
|
});
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows which doesn't satisfy the filter.
|
|
*
|
|
* Unlike most filters, `opearator` and `value` are used as-is and need to
|
|
* follow [PostgREST
|
|
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
|
|
* to make sure they are properly sanitized.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param operator - The operator to be negated to filter with, following
|
|
* PostgREST syntax
|
|
* @param value - The value to filter with, following PostgREST syntax
|
|
*/
|
|
not(column, operator, value) {
|
|
this.url.searchParams.append(column, `not.${operator}.${value}`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows which satisfy at least one of the filters.
|
|
*
|
|
* Unlike most filters, `filters` is used as-is and needs to follow [PostgREST
|
|
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
|
|
* to make sure it's properly sanitized.
|
|
*
|
|
* It's currently not possible to do an `.or()` filter across multiple tables.
|
|
*
|
|
* @param filters - The filters to use, following PostgREST syntax
|
|
* @param options - Named parameters
|
|
* @param options.referencedTable - Set this to filter on referenced tables
|
|
* instead of the parent table
|
|
* @param options.foreignTable - Deprecated, use `referencedTable` instead
|
|
*/
|
|
or(filters, { foreignTable, referencedTable = foreignTable, } = {}) {
|
|
const key = referencedTable ? `${referencedTable}.or` : 'or';
|
|
this.url.searchParams.append(key, `(${filters})`);
|
|
return this;
|
|
}
|
|
/**
|
|
* Match only rows which satisfy the filter. This is an escape hatch - you
|
|
* should use the specific filter methods wherever possible.
|
|
*
|
|
* Unlike most filters, `opearator` and `value` are used as-is and need to
|
|
* follow [PostgREST
|
|
* syntax](https://postgrest.org/en/stable/api.html#operators). You also need
|
|
* to make sure they are properly sanitized.
|
|
*
|
|
* @param column - The column to filter on
|
|
* @param operator - The operator to filter with, following PostgREST syntax
|
|
* @param value - The value to filter with, following PostgREST syntax
|
|
*/
|
|
filter(column, operator, value) {
|
|
this.url.searchParams.append(column, `${operator}.${value}`);
|
|
return this;
|
|
}
|
|
};
|
|
PostgrestFilterBuilder$2.default = PostgrestFilterBuilder$1;

// CommonJS interop helper (TypeScript `__importDefault` shim): wraps a
// non-ES module object so it can be consumed via a `.default` property.
var __importDefault$2 = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
// Mark the query-builder module object as an ES-module-compatible export
// and pull in the filter builder it chains into.
Object.defineProperty(PostgrestQueryBuilder$2, "__esModule", { value: true });
const PostgrestFilterBuilder_1$2 = __importDefault$2(PostgrestFilterBuilder$2);
|
|
let PostgrestQueryBuilder$1 = class PostgrestQueryBuilder {
    /**
     * Creates a query builder bound to a single table or view endpoint.
     *
     * @param url - URL of the table/view endpoint
     * @param options - Named parameters
     * @param options.headers - Custom headers
     * @param options.schema - Postgres schema the query targets
     * @param options.fetch - Custom fetch implementation
     */
    constructor(url, { headers = {}, schema, fetch, }) {
        this.url = url;
        this.headers = new Headers(headers);
        this.schema = schema;
        this.fetch = fetch;
    }
    /**
     * For bulk (array) payloads, advertises the union of keys across all rows
     * via the `columns` query parameter so PostgREST knows the full column
     * set even when individual rows omit fields. Shared by `insert`/`upsert`.
     */
    _setBulkColumns(values) {
        if (Array.isArray(values)) {
            const columns = values.reduce((acc, x) => acc.concat(Object.keys(x)), []);
            if (columns.length > 0) {
                const uniqueColumns = [...new Set(columns)].map((column) => `"${column}"`);
                this.url.searchParams.set('columns', uniqueColumns.join(','));
            }
        }
    }
    /**
     * Perform a SELECT query on the table or view.
     *
     * @param columns - The columns to retrieve, separated by commas. Columns
     * can be renamed when returned with `customName:columnName`
     * @param options - Named parameters
     * @param options.head - When set to `true`, `data` will not be returned.
     * Useful if you only need the count.
     * @param options.count - Count algorithm to use to count rows in the table
     * or view. `"exact"`: exact but slow (`COUNT(*)`). `"planned"`:
     * approximated but fast (Postgres statistics). `"estimated"`: exact for
     * low numbers, planned for high numbers.
     */
    select(columns, { head = false, count, } = {}) {
        const method = head ? 'HEAD' : 'GET';
        // Strip whitespace from the column list, except inside double-quoted
        // identifiers, which must be preserved verbatim.
        let quoted = false;
        const cleanedColumns = (columns ?? '*')
            .split('')
            .map((c) => {
                if (/\s/.test(c) && !quoted) {
                    return '';
                }
                if (c === '"') {
                    quoted = !quoted;
                }
                return c;
            })
            .join('');
        this.url.searchParams.set('select', cleanedColumns);
        if (count) {
            this.headers.append('Prefer', `count=${count}`);
        }
        return new PostgrestFilterBuilder_1$2.default({
            method,
            url: this.url,
            headers: this.headers,
            schema: this.schema,
            // Fixed for consistency with insert/upsert/update/delete: fall
            // back to the global fetch instead of passing `undefined` through.
            fetch: this.fetch ?? fetch,
        });
    }
    /**
     * Perform an INSERT into the table or view.
     *
     * By default, inserted rows are not returned. To return them, chain the
     * call with `.select()`.
     *
     * @param values - The values to insert. Pass an object to insert a single
     * row or an array to insert multiple rows.
     * @param options - Named parameters
     * @param options.count - Count algorithm to use to count inserted rows
     * (`"exact"`, `"planned"`, or `"estimated"` — see `select`).
     * @param options.defaultToNull - Make missing fields default to `null`.
     * Otherwise, use the default value for the column. Only applies for bulk
     * inserts.
     */
    insert(values, { count, defaultToNull = true, } = {}) {
        const method = 'POST';
        if (count) {
            this.headers.append('Prefer', `count=${count}`);
        }
        if (!defaultToNull) {
            this.headers.append('Prefer', `missing=default`);
        }
        this._setBulkColumns(values);
        return new PostgrestFilterBuilder_1$2.default({
            method,
            url: this.url,
            headers: this.headers,
            schema: this.schema,
            body: values,
            fetch: this.fetch ?? fetch,
        });
    }
    /**
     * Perform an UPSERT on the table or view. Depending on the column(s)
     * passed to `onConflict`, `.upsert()` allows you to perform the equivalent
     * of `.insert()` if a row with the corresponding `onConflict` columns
     * doesn't exist, or if it does exist, perform an alternative action
     * depending on `ignoreDuplicates`.
     *
     * By default, upserted rows are not returned. To return them, chain the
     * call with `.select()`.
     *
     * @param values - The values to upsert with. Pass an object to upsert a
     * single row or an array to upsert multiple rows.
     * @param options - Named parameters
     * @param options.onConflict - Comma-separated UNIQUE column(s) to specify
     * how duplicate rows are determined. Two rows are duplicates if all the
     * `onConflict` columns are equal.
     * @param options.ignoreDuplicates - If `true`, duplicate rows are ignored.
     * If `false`, duplicate rows are merged with existing rows.
     * @param options.count - Count algorithm to use to count upserted rows
     * (`"exact"`, `"planned"`, or `"estimated"` — see `select`).
     * @param options.defaultToNull - Make missing fields default to `null`.
     * Otherwise, use the default value for the column. This only applies when
     * inserting new rows, not when merging with existing rows under
     * `ignoreDuplicates: false`. This also only applies when doing bulk
     * upserts.
     */
    upsert(values, { onConflict, ignoreDuplicates = false, count, defaultToNull = true, } = {}) {
        const method = 'POST';
        this.headers.append('Prefer', `resolution=${ignoreDuplicates ? 'ignore' : 'merge'}-duplicates`);
        if (onConflict !== undefined)
            this.url.searchParams.set('on_conflict', onConflict);
        if (count) {
            this.headers.append('Prefer', `count=${count}`);
        }
        if (!defaultToNull) {
            this.headers.append('Prefer', 'missing=default');
        }
        this._setBulkColumns(values);
        return new PostgrestFilterBuilder_1$2.default({
            method,
            url: this.url,
            headers: this.headers,
            schema: this.schema,
            body: values,
            fetch: this.fetch ?? fetch,
        });
    }
    /**
     * Perform an UPDATE on the table or view.
     *
     * By default, updated rows are not returned. To return them, chain the
     * call with `.select()` after filters.
     *
     * @param values - The values to update with
     * @param options - Named parameters
     * @param options.count - Count algorithm to use to count updated rows
     * (`"exact"`, `"planned"`, or `"estimated"` — see `select`).
     */
    update(values, { count, } = {}) {
        const method = 'PATCH';
        if (count) {
            this.headers.append('Prefer', `count=${count}`);
        }
        return new PostgrestFilterBuilder_1$2.default({
            method,
            url: this.url,
            headers: this.headers,
            schema: this.schema,
            body: values,
            fetch: this.fetch ?? fetch,
        });
    }
    /**
     * Perform a DELETE on the table or view.
     *
     * By default, deleted rows are not returned. To return them, chain the
     * call with `.select()` after filters.
     *
     * @param options - Named parameters
     * @param options.count - Count algorithm to use to count deleted rows
     * (`"exact"`, `"planned"`, or `"estimated"` — see `select`).
     */
    delete({ count, } = {}) {
        const method = 'DELETE';
        if (count) {
            this.headers.append('Prefer', `count=${count}`);
        }
        return new PostgrestFilterBuilder_1$2.default({
            method,
            url: this.url,
            headers: this.headers,
            schema: this.schema,
            fetch: this.fetch ?? fetch,
        });
    }
};
|
|
PostgrestQueryBuilder$2.default = PostgrestQueryBuilder$1;

// CommonJS interop helper (TypeScript `__importDefault` shim).
var __importDefault$1 = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
// Mark the client module object as an ES-module-compatible export and pull
// in the builders the client dispatches to (`from()` and `rpc()`).
Object.defineProperty(PostgrestClient$2, "__esModule", { value: true });
const PostgrestQueryBuilder_1$1 = __importDefault$1(PostgrestQueryBuilder$2);
const PostgrestFilterBuilder_1$1 = __importDefault$1(PostgrestFilterBuilder$2);
|
|
/**
 * PostgREST client.
 *
 * @typeParam Database - Types for the schema from the [type
 * generator](https://supabase.com/docs/reference/javascript/next/typescript-support)
 *
 * @typeParam SchemaName - Postgres schema to switch to. Must be a string
 * literal, the same one passed to the constructor. If the schema is not
 * `"public"`, this must be supplied manually.
 */
let PostgrestClient$1 = class PostgrestClient {
    // TODO: Add back shouldThrowOnError once we figure out the typings
    /**
     * Creates a PostgREST client.
     *
     * @param url - URL of the PostgREST endpoint
     * @param options - Named parameters
     * @param options.headers - Custom headers
     * @param options.schema - Postgres schema to switch to
     * @param options.fetch - Custom fetch
     */
    constructor(url, { headers = {}, schema, fetch, } = {}) {
        this.url = url;
        this.headers = new Headers(headers);
        this.schemaName = schema;
        this.fetch = fetch;
    }
    /**
     * Perform a query on a table or a view.
     *
     * @param relation - The table or view name to query
     */
    from(relation) {
        const endpoint = new URL(`${this.url}/${relation}`);
        return new PostgrestQueryBuilder_1$1.default(endpoint, {
            // Copy the headers so per-query mutations don't leak back here.
            headers: new Headers(this.headers),
            schema: this.schemaName,
            fetch: this.fetch,
        });
    }
    /**
     * Select a schema to query or perform an function (rpc) call.
     *
     * The schema needs to be on the list of exposed schemas inside Supabase.
     *
     * @param schema - The schema to query
     */
    schema(schema) {
        const options = {
            headers: this.headers,
            schema,
            fetch: this.fetch,
        };
        return new PostgrestClient(this.url, options);
    }
    /**
     * Perform a function call.
     *
     * @param fn - The function name to call
     * @param args - The arguments to pass to the function call
     * @param options - Named parameters
     * @param options.head - When set to `true`, `data` will not be returned.
     * Useful if you only need the count.
     * @param options.get - When set to `true`, the function will be called
     * with read-only access mode.
     * @param options.count - Count algorithm to use to count rows returned by
     * the function. Only applicable for [set-returning
     * functions](https://www.postgresql.org/docs/current/functions-srf.html).
     * `"exact"`: exact but slow (`COUNT(*)`). `"planned"`: approximated but
     * fast (Postgres statistics). `"estimated"`: exact for low numbers,
     * planned for high numbers.
     */
    rpc(fn, args = {}, { head = false, get = false, count, } = {}) {
        const url = new URL(`${this.url}/rpc/${fn}`);
        let method;
        let body;
        if (head || get) {
            method = head ? 'HEAD' : 'GET';
            for (const [name, value] of Object.entries(args)) {
                // Params with undefined value need to be filtered out,
                // otherwise they'd show up as `?param=undefined`.
                if (value === undefined)
                    continue;
                // Array values need PostgREST's brace syntax.
                const encoded = Array.isArray(value) ? `{${value.join(',')}}` : `${value}`;
                url.searchParams.append(name, encoded);
            }
        }
        else {
            method = 'POST';
            body = args;
        }
        const headers = new Headers(this.headers);
        if (count) {
            headers.set('Prefer', `count=${count}`);
        }
        return new PostgrestFilterBuilder_1$1.default({
            method,
            url,
            headers,
            schema: this.schemaName,
            body,
            fetch: this.fetch ?? fetch,
        });
    }
};
|
|
PostgrestClient$2.default = PostgrestClient$1;

// CommonJS interop helper (TypeScript `__importDefault` shim).
var __importDefault = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
// postgrest-js CommonJS entry point: re-exports every builder class on the
// `cjs` module object and as a combined default export.
Object.defineProperty(cjs, "__esModule", { value: true });
cjs.PostgrestError = cjs.PostgrestBuilder = cjs.PostgrestTransformBuilder = cjs.PostgrestFilterBuilder = cjs.PostgrestQueryBuilder = cjs.PostgrestClient = void 0;
// Always update wrapper.mjs when updating this file.
const PostgrestClient_1 = __importDefault(PostgrestClient$2);
cjs.PostgrestClient = PostgrestClient_1.default;
const PostgrestQueryBuilder_1 = __importDefault(PostgrestQueryBuilder$2);
cjs.PostgrestQueryBuilder = PostgrestQueryBuilder_1.default;
const PostgrestFilterBuilder_1 = __importDefault(PostgrestFilterBuilder$2);
cjs.PostgrestFilterBuilder = PostgrestFilterBuilder_1.default;
const PostgrestTransformBuilder_1 = __importDefault(PostgrestTransformBuilder$2);
cjs.PostgrestTransformBuilder = PostgrestTransformBuilder_1.default;
const PostgrestBuilder_1 = __importDefault(PostgrestBuilder$2);
cjs.PostgrestBuilder = PostgrestBuilder_1.default;
const PostgrestError_1 = __importDefault(PostgrestError$2);
cjs.PostgrestError = PostgrestError_1.default;
var _default = cjs.default = {
    PostgrestClient: PostgrestClient_1.default,
    PostgrestQueryBuilder: PostgrestQueryBuilder_1.default,
    PostgrestFilterBuilder: PostgrestFilterBuilder_1.default,
    PostgrestTransformBuilder: PostgrestTransformBuilder_1.default,
    PostgrestBuilder: PostgrestBuilder_1.default,
    PostgrestError: PostgrestError_1.default,
};

// Local bindings consumed by the supabase-js layer later in this bundle.
const {
    PostgrestClient,
    PostgrestQueryBuilder,
    PostgrestFilterBuilder,
    PostgrestTransformBuilder,
    PostgrestBuilder,
    PostgrestError,
} = _default;
|
|
|
|
class WebSocketFactory {
    /**
     * Probes the current JavaScript runtime for a usable WebSocket client.
     * Returns `{ type: 'native', constructor }` when one is found, otherwise
     * a `{ type, error, workaround }` record describing why not.
     */
    static detectEnvironment() {
        // Prefer a global WebSocket wherever it is exposed.
        if (typeof WebSocket !== 'undefined') {
            return { type: 'native', constructor: WebSocket };
        }
        if (typeof globalThis !== 'undefined' &&
            typeof globalThis.WebSocket !== 'undefined') {
            return { type: 'native', constructor: globalThis.WebSocket };
        }
        if (typeof global !== 'undefined' &&
            typeof global.WebSocket !== 'undefined') {
            return { type: 'native', constructor: global.WebSocket };
        }
        // Cloudflare Workers expose WebSocketPair but no client constructor.
        if (typeof globalThis !== 'undefined' &&
            typeof globalThis.WebSocketPair !== 'undefined' &&
            typeof globalThis.WebSocket === 'undefined') {
            return {
                type: 'cloudflare',
                error: 'Cloudflare Workers detected. WebSocket clients are not supported in Cloudflare Workers.',
                workaround: 'Use Cloudflare Workers WebSocket API for server-side WebSocket handling, or deploy to a different runtime.',
            };
        }
        // Vercel/Netlify edge runtimes.
        const onEdgeRuntime = (typeof globalThis !== 'undefined' && globalThis.EdgeRuntime) ||
            (typeof navigator !== 'undefined' && navigator.userAgent?.includes('Vercel-Edge'));
        if (onEdgeRuntime) {
            return {
                type: 'unsupported',
                error: 'Edge runtime detected (Vercel Edge/Netlify Edge). WebSockets are not supported in edge functions.',
                workaround: 'Use serverless functions or a different deployment target for WebSocket functionality.',
            };
        }
        if (typeof process !== 'undefined' &&
            process.versions &&
            process.versions.node) {
            const majorVersion = parseInt(process.versions.node.split('.')[0], 10);
            if (majorVersion >= 22) {
                // Node.js 22+ ships native WebSocket; re-check in case it was
                // disabled (e.g. via a flag).
                if (typeof globalThis.WebSocket !== 'undefined') {
                    return { type: 'native', constructor: globalThis.WebSocket };
                }
                return {
                    type: 'unsupported',
                    error: `Node.js ${majorVersion} detected but native WebSocket not found.`,
                    workaround: 'Provide a WebSocket implementation via the transport option.',
                };
            }
            // Node.js < 22 has no native WebSocket client.
            return {
                type: 'unsupported',
                error: `Node.js ${majorVersion} detected without native WebSocket support.`,
                workaround: 'For Node.js < 22, install "ws" package and provide it via the transport option:\n' +
                    'import ws from "ws"\n' +
                    'new RealtimeClient(url, { transport: ws })',
            };
        }
        return {
            type: 'unsupported',
            error: 'Unknown JavaScript runtime without WebSocket support.',
            workaround: "Ensure you're running in a supported environment (browser, Node.js, Deno) or provide a custom WebSocket implementation.",
        };
    }
    /**
     * Returns the detected WebSocket constructor, or throws an Error whose
     * message includes the detected problem and a suggested workaround.
     */
    static getWebSocketConstructor() {
        const env = this.detectEnvironment();
        if (env.constructor) {
            return env.constructor;
        }
        let errorMessage = env.error || 'WebSocket not supported in this environment.';
        if (env.workaround) {
            errorMessage += `\n\nSuggested solution: ${env.workaround}`;
        }
        throw new Error(errorMessage);
    }
    /** Instantiates a WebSocket with the detected constructor. */
    static createWebSocket(url, protocols) {
        const Ctor = this.getWebSocketConstructor();
        return new Ctor(url, protocols);
    }
    /** True when a usable WebSocket implementation was detected. */
    static isWebSocketSupported() {
        try {
            const { type } = this.detectEnvironment();
            return type === 'native' || type === 'ws';
        }
        catch (_err) {
            return false;
        }
    }
}
|
|
|
|
const version$3 = '2.15.1';

// realtime-js protocol constants.
const DEFAULT_VERSION = `realtime-js/${version$3}`; // client identifier string
const VSN = '1.0.0'; // Phoenix channels wire-protocol version
const DEFAULT_TIMEOUT = 10000; // default timeout in milliseconds
const WS_CLOSE_NORMAL = 1000; // RFC 6455 "normal closure" close code
// NOTE(review): presumably caps the number of pushes buffered while the
// socket is disconnected — the consuming code is outside this chunk, confirm.
const MAX_PUSH_BUFFER_SIZE = 100;
|
|
// Transpiled TypeScript enums (the numeric one also gets a reverse mapping).
// WebSocket readyState values, mirroring the W3C WebSocket constants.
var SOCKET_STATES;
(function (SOCKET_STATES) {
    SOCKET_STATES[SOCKET_STATES["connecting"] = 0] = "connecting";
    SOCKET_STATES[SOCKET_STATES["open"] = 1] = "open";
    SOCKET_STATES[SOCKET_STATES["closing"] = 2] = "closing";
    SOCKET_STATES[SOCKET_STATES["closed"] = 3] = "closed";
})(SOCKET_STATES || (SOCKET_STATES = {}));
// Lifecycle states of a realtime channel.
var CHANNEL_STATES;
(function (CHANNEL_STATES) {
    CHANNEL_STATES["closed"] = "closed";
    CHANNEL_STATES["errored"] = "errored";
    CHANNEL_STATES["joined"] = "joined";
    CHANNEL_STATES["joining"] = "joining";
    CHANNEL_STATES["leaving"] = "leaving";
})(CHANNEL_STATES || (CHANNEL_STATES = {}));
// Reserved Phoenix channel event names.
var CHANNEL_EVENTS;
(function (CHANNEL_EVENTS) {
    CHANNEL_EVENTS["close"] = "phx_close";
    CHANNEL_EVENTS["error"] = "phx_error";
    CHANNEL_EVENTS["join"] = "phx_join";
    CHANNEL_EVENTS["reply"] = "phx_reply";
    CHANNEL_EVENTS["leave"] = "phx_leave";
    CHANNEL_EVENTS["access_token"] = "access_token";
})(CHANNEL_EVENTS || (CHANNEL_EVENTS = {}));
// Supported transports (WebSocket only).
var TRANSPORTS;
(function (TRANSPORTS) {
    TRANSPORTS["websocket"] = "websocket";
})(TRANSPORTS || (TRANSPORTS = {}));
// Human-readable connection states exposed by the client.
var CONNECTION_STATE;
(function (CONNECTION_STATE) {
    CONNECTION_STATE["Connecting"] = "connecting";
    CONNECTION_STATE["Open"] = "open";
    CONNECTION_STATE["Closing"] = "closing";
    CONNECTION_STATE["Closed"] = "closed";
})(CONNECTION_STATE || (CONNECTION_STATE = {}));
|
|
|
|
// This file draws heavily from https://github.com/phoenixframework/phoenix/commit/cf098e9cf7a44ee6479d31d911a97d3c7430c6fe
|
|
// License: https://github.com/phoenixframework/phoenix/blob/master/LICENSE.md
|
|
class Serializer {
    constructor() {
        // Number of metadata bytes at the start of a binary frame, before the
        // topic-size and event-size bytes.
        this.HEADER_LENGTH = 1;
    }
    /**
     * Decodes a raw websocket payload and hands the result to `callback`.
     * ArrayBuffer frames go through the binary broadcast decoder, strings
     * are parsed as JSON, and anything else yields an empty object.
     */
    decode(rawPayload, callback) {
        if (rawPayload.constructor === ArrayBuffer) {
            return callback(this._binaryDecode(rawPayload));
        }
        if (typeof rawPayload === 'string') {
            return callback(JSON.parse(rawPayload));
        }
        return callback({});
    }
    _binaryDecode(buffer) {
        return this._decodeBroadcast(buffer, new DataView(buffer), new TextDecoder());
    }
    // Binary broadcast frame layout:
    //   [header byte][topicSize][eventSize][topic bytes][event bytes][JSON payload]
    _decodeBroadcast(buffer, view, decoder) {
        const topicLength = view.getUint8(1);
        const eventLength = view.getUint8(2);
        let cursor = this.HEADER_LENGTH + 2;
        const topic = decoder.decode(buffer.slice(cursor, cursor + topicLength));
        cursor += topicLength;
        const event = decoder.decode(buffer.slice(cursor, cursor + eventLength));
        cursor += eventLength;
        // Everything after the event string is the JSON-encoded payload.
        const payload = JSON.parse(decoder.decode(buffer.slice(cursor, buffer.byteLength)));
        return { ref: null, topic: topic, event: event, payload: payload };
    }
}
|
|
|
|
/**
|
|
* Creates a timer that accepts a `timerCalc` function to perform calculated timeout retries, such as exponential backoff.
|
|
*
|
|
* @example
|
|
* let reconnectTimer = new Timer(() => this.connect(), function(tries){
|
|
* return [1000, 5000, 10000][tries - 1] || 10000
|
|
* })
|
|
* reconnectTimer.scheduleTimeout() // fires after 1000
|
|
* reconnectTimer.scheduleTimeout() // fires after 5000
|
|
* reconnectTimer.reset()
|
|
* reconnectTimer.scheduleTimeout() // fires after 1000
|
|
*/
|
|
class Timer {
    /**
     * @param callback - Invoked when the scheduled timeout fires
     * @param timerCalc - Maps the (1-based) attempt number to a delay in ms
     */
    constructor(callback, timerCalc) {
        // Fixed: `callback` and `timerCalc` were previously assigned twice.
        this.callback = callback;
        this.timerCalc = timerCalc;
        this.timer = undefined;
        this.tries = 0;
    }
    /** Cancels any pending timeout and resets the retry counter. */
    reset() {
        this.tries = 0;
        clearTimeout(this.timer);
        this.timer = undefined;
    }
    // Cancels any previous scheduleTimeout and schedules callback
    scheduleTimeout() {
        clearTimeout(this.timer);
        this.timer = setTimeout(() => {
            this.tries = this.tries + 1;
            this.callback();
        }, this.timerCalc(this.tries + 1));
    }
}
|
|
|
|
/**
|
|
* Helpers to convert the change Payload into native JS types.
|
|
*/
|
|
// Adapted from epgsql (src/epgsql_binary.erl), this module licensed under
|
|
// 3-clause BSD found here: https://raw.githubusercontent.com/epgsql/epgsql/devel/LICENSE
|
|
var PostgresTypes;
|
|
(function (PostgresTypes) {
|
|
PostgresTypes["abstime"] = "abstime";
|
|
PostgresTypes["bool"] = "bool";
|
|
PostgresTypes["date"] = "date";
|
|
PostgresTypes["daterange"] = "daterange";
|
|
PostgresTypes["float4"] = "float4";
|
|
PostgresTypes["float8"] = "float8";
|
|
PostgresTypes["int2"] = "int2";
|
|
PostgresTypes["int4"] = "int4";
|
|
PostgresTypes["int4range"] = "int4range";
|
|
PostgresTypes["int8"] = "int8";
|
|
PostgresTypes["int8range"] = "int8range";
|
|
PostgresTypes["json"] = "json";
|
|
PostgresTypes["jsonb"] = "jsonb";
|
|
PostgresTypes["money"] = "money";
|
|
PostgresTypes["numeric"] = "numeric";
|
|
PostgresTypes["oid"] = "oid";
|
|
PostgresTypes["reltime"] = "reltime";
|
|
PostgresTypes["text"] = "text";
|
|
PostgresTypes["time"] = "time";
|
|
PostgresTypes["timestamp"] = "timestamp";
|
|
PostgresTypes["timestamptz"] = "timestamptz";
|
|
PostgresTypes["timetz"] = "timetz";
|
|
PostgresTypes["tsrange"] = "tsrange";
|
|
PostgresTypes["tstzrange"] = "tstzrange";
|
|
})(PostgresTypes || (PostgresTypes = {}));
|
|
/**
 * Takes an array of columns and an object of string values then converts
 * each string value to its mapped type.
 *
 * @param {{name: String, type: String}[]} columns
 * @param {Object} record
 * @param {Object} options The map of various options that can be applied to the mapper
 * @param {Array} options.skipTypes The array of types that should not be converted
 *
 * @example convertChangeData([{name: 'first_name', type: 'text'}, {name: 'age', type: 'int4'}], {first_name: 'Paul', age:'33'}, {})
 * //=>{ first_name: 'Paul', age: 33 }
 */
const convertChangeData = (columns, record, options = {}) => {
    const skipTypes = options.skipTypes ?? [];
    const converted = {};
    for (const key of Object.keys(record)) {
        converted[key] = convertColumn(key, columns, record, skipTypes);
    }
    return converted;
};
|
|
/**
 * Converts the value of an individual column.
 *
 * @param {String} columnName The column that you want to convert
 * @param {{name: String, type: String}[]} columns All of the columns
 * @param {Object} record The map of string values
 * @param {Array} skipTypes An array of types that should not be converted
 * @return {*} The converted cell value, or the raw value when the column's
 * type is unknown or listed in `skipTypes`
 *
 * @example convertColumn('age', [{name: 'first_name', type: 'text'}, {name: 'age', type: 'int4'}], {first_name: 'Paul', age: '33'}, [])
 * //=> 33
 * @example convertColumn('age', [{name: 'first_name', type: 'text'}, {name: 'age', type: 'int4'}], {first_name: 'Paul', age: '33'}, ['int4'])
 * //=> "33"
 */
const convertColumn = (columnName, columns, record, skipTypes) => {
    const colType = columns.find((col) => col.name === columnName)?.type;
    const value = record[columnName];
    if (colType && !skipTypes.includes(colType)) {
        return convertCell(colType, value);
    }
    // Unknown or skipped type: pass the raw value through unchanged.
    return noop$2(value);
};
|
|
/**
 * Converts a single cell's string value to the JS type matching its Postgres type.
 * Values of types without a converter (text, dates, ranges, money, ...) are
 * returned unchanged so callers can cast them as they see fit.
 *
 * @param {String} type A postgres column type
 * @param {String} value The cell value
 *
 * @example convertCell('bool', 't')
 * //=> true
 * @example convertCell('int8', '10')
 * //=> 10
 * @example convertCell('_int4', '{1,2,3,4}')
 * //=> [1,2,3,4]
 */
const convertCell = (type, value) => {
    // Postgres array types are named with a leading underscore, e.g. `_int4`.
    if (type.charAt(0) === '_') {
        return toArray$1(value, type.slice(1));
    }
    if (type === PostgresTypes.bool) {
        return toBoolean(value);
    }
    const numericTypes = [
        PostgresTypes.float4,
        PostgresTypes.float8,
        PostgresTypes.int2,
        PostgresTypes.int4,
        PostgresTypes.int8,
        PostgresTypes.numeric,
        PostgresTypes.oid,
    ];
    if (numericTypes.includes(type)) {
        return toNumber(value);
    }
    if (type === PostgresTypes.json || type === PostgresTypes.jsonb) {
        return toJson(value);
    }
    if (type === PostgresTypes.timestamp) {
        // Format to be consistent with PostgREST
        return toTimestampString(value);
    }
    // Every remaining type (text, time/date variants, ranges, money, ...) is
    // passed through so users can cast it themselves, e.g. based on timezone.
    return noop$2(value);
};
|
|
/** Identity helper: returns its argument unchanged. */
const noop$2 = (value) => value;
|
|
/**
 * Maps the Postgres boolean literals 't'/'f' to true/false.
 * Any other value is returned unchanged.
 */
const toBoolean = (value) => {
    if (value === 't') {
        return true;
    }
    if (value === 'f') {
        return false;
    }
    return value;
};
|
|
/**
 * Parses a numeric string into a number; non-strings and unparseable
 * strings are returned unchanged.
 */
const toNumber = (value) => {
    if (typeof value !== 'string') {
        return value;
    }
    const parsed = parseFloat(value);
    return Number.isNaN(parsed) ? value : parsed;
};
|
|
/**
 * Parses a JSON string into a JS value. Non-strings pass through; parse
 * failures are logged and the original string is returned.
 */
const toJson = (value) => {
    if (typeof value !== 'string') {
        return value;
    }
    try {
        return JSON.parse(value);
    }
    catch (error) {
        console.log(`JSON parse error: ${error}`);
        return value;
    }
};
|
|
/**
 * Converts a Postgres array literal into a native JS array, converting each
 * element with `convertCell`. Non-strings and strings that are not wrapped
 * in curly braces pass through unchanged.
 *
 * @example toArray('{}', 'int4')
 * //=> []
 * @example toArray('{"[2021-01-01,2021-12-31)","(2021-01-01,2021-12-32]"}', 'daterange')
 * //=> ['[2021-01-01,2021-12-31)', '(2021-01-01,2021-12-32]']
 * @example toArray([1,2,3,4], 'int4')
 * //=> [1,2,3,4]
 */
const toArray$1 = (value, type) => {
    if (typeof value !== 'string') {
        return value;
    }
    // Confirm value is a Postgres array literal by checking the curly brackets.
    if (!(value.length > 0 && value.startsWith('{') && value.endsWith('}') && value.length > 1)) {
        return value;
    }
    const inner = value.slice(1, -1);
    let elements;
    // TODO: find a better solution to separate Postgres array data
    try {
        elements = JSON.parse(`[${inner}]`);
    }
    catch (_) {
        // WARNING: splitting on comma does not cover all edge cases
        elements = inner ? inner.split(',') : [];
    }
    return elements.map((element) => convertCell(type, element));
};
|
|
/**
 * Fixes a timestamp to be ISO-8601 by swapping the single space between date
 * and time for a 'T'. Non-strings pass through unchanged.
 * See https://github.com/supabase/supabase/issues/18
 *
 * @example toTimestampString('2019-09-10 00:00:00')
 * //=> '2019-09-10T00:00:00'
 */
const toTimestampString = (value) => {
    // `replace` with a string pattern only touches the first space — exactly
    // the date/time separator.
    return typeof value === 'string' ? value.replace(' ', 'T') : value;
};
|
|
/**
 * Derives the HTTP broadcast endpoint from a realtime websocket URL:
 * swaps the ws/wss scheme for http/https, strips any trailing
 * `/socket`, `/websocket` or `/socket/websocket` path segment, and appends
 * `/api/broadcast`.
 */
const httpEndpointURL = (socketUrl) => {
    const base = socketUrl
        .replace(/^ws/i, 'http')
        .replace(/(\/socket\/websocket|\/socket|\/websocket)\/?$/i, '')
        .replace(/\/+$/, '');
    return `${base}/api/broadcast`;
};
|
|
|
|
/**
 * A single message pushed over a Realtime channel, tracking its reply and
 * timeout lifecycle. Replies are delivered via a per-ref reply event on the
 * owning channel.
 */
class Push {
    /**
     * Initializes the Push
     *
     * @param channel The Channel
     * @param event The event, for example `"phx_join"`
     * @param payload The payload, for example `{user_id: 123}`
     * @param timeout The push timeout in milliseconds
     */
    constructor(channel, event, payload = {}, timeout = DEFAULT_TIMEOUT) {
        this.channel = channel;
        this.event = event;
        this.payload = payload;
        this.timeout = timeout;
        // True once the message has been written to the socket.
        this.sent = false;
        this.timeoutTimer = undefined;
        // Unique message ref; assigned by the socket when the timeout is armed.
        this.ref = '';
        // The reply payload, once received ({status, response}).
        this.receivedResp = null;
        // Registered {status, callback} hooks fired on matching replies.
        this.recHooks = [];
        // Channel event name carrying this push's reply (derived from `ref`).
        this.refEvent = null;
    }
    /** Resets reply/ref state and sends the push again with a new timeout. */
    resend(timeout) {
        this.timeout = timeout;
        this._cancelRefEvent();
        this.ref = '';
        this.refEvent = null;
        this.receivedResp = null;
        this.sent = false;
        this.send();
    }
    /** Arms the timeout and pushes the message over the socket, unless a
     * 'timeout' reply was already recorded. */
    send() {
        if (this._hasReceived('timeout')) {
            return;
        }
        this.startTimeout();
        this.sent = true;
        this.channel.socket.push({
            topic: this.channel.topic,
            event: this.event,
            payload: this.payload,
            ref: this.ref,
            join_ref: this.channel._joinRef(),
        });
    }
    /** Shallow-merges extra fields into the payload before (re)sending. */
    updatePayload(payload) {
        this.payload = Object.assign(Object.assign({}, this.payload), payload);
    }
    /**
     * Registers a callback for a given reply status. If a reply with that
     * status already arrived, the callback fires immediately as well.
     * Returns `this` for chaining.
     */
    receive(status, callback) {
        var _a;
        if (this._hasReceived(status)) {
            callback((_a = this.receivedResp) === null || _a === void 0 ? void 0 : _a.response);
        }
        this.recHooks.push({ status, callback });
        return this;
    }
    /**
     * Arms the reply listener and the timeout timer. Idempotent: does nothing
     * if a timer is already running.
     */
    startTimeout() {
        if (this.timeoutTimer) {
            return;
        }
        this.ref = this.channel.socket._makeRef();
        this.refEvent = this.channel._replyEventName(this.ref);
        const callback = (payload) => {
            // A reply arrived: stop listening, cancel the timer, record and
            // dispatch the payload to matching hooks.
            this._cancelRefEvent();
            this._cancelTimeout();
            this.receivedResp = payload;
            this._matchReceive(payload);
        };
        this.channel._on(this.refEvent, {}, callback);
        this.timeoutTimer = setTimeout(() => {
            this.trigger('timeout', {});
        }, this.timeout);
    }
    /** Simulates a reply with the given status/response on this push's ref event. */
    trigger(status, response) {
        if (this.refEvent)
            this.channel._trigger(this.refEvent, { status, response });
    }
    /** Removes the reply listener and stops the timeout timer. */
    destroy() {
        this._cancelRefEvent();
        this._cancelTimeout();
    }
    /** Unregisters the per-ref reply listener, if one was armed. */
    _cancelRefEvent() {
        if (!this.refEvent) {
            return;
        }
        this.channel._off(this.refEvent, {});
    }
    /** Stops and clears the timeout timer. */
    _cancelTimeout() {
        clearTimeout(this.timeoutTimer);
        this.timeoutTimer = undefined;
    }
    /** Invokes every hook registered for the received reply status. */
    _matchReceive({ status, response, }) {
        this.recHooks
            .filter((h) => h.status === status)
            .forEach((h) => h.callback(response));
    }
    /** Whether a reply with the given status has already been recorded. */
    _hasReceived(status) {
        return this.receivedResp && this.receivedResp.status === status;
    }
}
|
|
|
|
/*
This file draws heavily from https://github.com/phoenixframework/phoenix/blob/d344ec0a732ab4ee204215b31de69cf4be72e3bf/assets/js/phoenix/presence.js
License: https://github.com/phoenixframework/phoenix/blob/d344ec0a732ab4ee204215b31de69cf4be72e3bf/LICENSE.md
*/
/** Presence events a client can listen for. */
var REALTIME_PRESENCE_LISTEN_EVENTS;
(function (listenEvents) {
    listenEvents["SYNC"] = "sync";
    listenEvents["JOIN"] = "join";
    listenEvents["LEAVE"] = "leave";
})(REALTIME_PRESENCE_LISTEN_EVENTS || (REALTIME_PRESENCE_LISTEN_EVENTS = {}));
|
|
/**
 * Tracks which clients are present on a channel, mirroring Phoenix Presence.
 * Keeps a local `state` map in sync with server `presence_state` snapshots
 * and incremental `presence_diff` updates, and re-emits join/leave/sync as
 * 'presence' events on the channel.
 */
class RealtimePresence {
    /**
     * Initializes the Presence.
     *
     * @param channel - The RealtimeChannel
     * @param opts - The options,
     * for example `{events: {state: 'state', diff: 'diff'}}`
     */
    constructor(channel, opts) {
        this.channel = channel;
        // Local presence map: key -> array of presences ({presence_ref, ...}).
        this.state = {};
        // Diffs received before the first full state snapshot are queued here.
        this.pendingDiffs = [];
        // Join ref of the channel at the time of the last state snapshot.
        this.joinRef = null;
        this.enabled = false;
        this.caller = {
            onJoin: () => { },
            onLeave: () => { },
            onSync: () => { },
        };
        const events = (opts === null || opts === void 0 ? void 0 : opts.events) || {
            state: 'presence_state',
            diff: 'presence_diff',
        };
        // Full state snapshot from the server: sync, then replay queued diffs.
        this.channel._on(events.state, {}, (newState) => {
            const { onJoin, onLeave, onSync } = this.caller;
            this.joinRef = this.channel._joinRef();
            this.state = RealtimePresence.syncState(this.state, newState, onJoin, onLeave);
            this.pendingDiffs.forEach((diff) => {
                this.state = RealtimePresence.syncDiff(this.state, diff, onJoin, onLeave);
            });
            this.pendingDiffs = [];
            onSync();
        });
        // Incremental diff: queue it while waiting for a snapshot, else apply.
        this.channel._on(events.diff, {}, (diff) => {
            const { onJoin, onLeave, onSync } = this.caller;
            if (this.inPendingSyncState()) {
                this.pendingDiffs.push(diff);
            }
            else {
                this.state = RealtimePresence.syncDiff(this.state, diff, onJoin, onLeave);
                onSync();
            }
        });
        // Re-emit join/leave/sync as 'presence' events on the channel.
        this.onJoin((key, currentPresences, newPresences) => {
            this.channel._trigger('presence', {
                event: 'join',
                key,
                currentPresences,
                newPresences,
            });
        });
        this.onLeave((key, currentPresences, leftPresences) => {
            this.channel._trigger('presence', {
                event: 'leave',
                key,
                currentPresences,
                leftPresences,
            });
        });
        this.onSync(() => {
            this.channel._trigger('presence', { event: 'sync' });
        });
    }
    /**
     * Used to sync the list of presences on the server with the
     * client's state.
     *
     * An optional `onJoin` and `onLeave` callback can be provided to
     * react to changes in the client's local presences across
     * disconnects and reconnects with the server.
     *
     * @internal
     */
    static syncState(currentState, newState, onJoin, onLeave) {
        const state = this.cloneDeep(currentState);
        const transformedState = this.transformState(newState);
        const joins = {};
        const leaves = {};
        // Keys present locally but absent from the server snapshot have left.
        this.map(state, (key, presences) => {
            if (!transformedState[key]) {
                leaves[key] = presences;
            }
        });
        // Diff each server key against local state by presence_ref.
        this.map(transformedState, (key, newPresences) => {
            const currentPresences = state[key];
            if (currentPresences) {
                const newPresenceRefs = newPresences.map((m) => m.presence_ref);
                const curPresenceRefs = currentPresences.map((m) => m.presence_ref);
                const joinedPresences = newPresences.filter((m) => curPresenceRefs.indexOf(m.presence_ref) < 0);
                const leftPresences = currentPresences.filter((m) => newPresenceRefs.indexOf(m.presence_ref) < 0);
                if (joinedPresences.length > 0) {
                    joins[key] = joinedPresences;
                }
                if (leftPresences.length > 0) {
                    leaves[key] = leftPresences;
                }
            }
            else {
                joins[key] = newPresences;
            }
        });
        return this.syncDiff(state, { joins, leaves }, onJoin, onLeave);
    }
    /**
     * Used to sync a diff of presence join and leave events from the
     * server, as they happen.
     *
     * Like `syncState`, `syncDiff` accepts optional `onJoin` and
     * `onLeave` callbacks to react to a user joining or leaving from a
     * device.
     *
     * @internal
     */
    static syncDiff(state, diff, onJoin, onLeave) {
        const { joins, leaves } = {
            joins: this.transformState(diff.joins),
            leaves: this.transformState(diff.leaves),
        };
        if (!onJoin) {
            onJoin = () => { };
        }
        if (!onLeave) {
            onLeave = () => { };
        }
        this.map(joins, (key, newPresences) => {
            var _a;
            const currentPresences = (_a = state[key]) !== null && _a !== void 0 ? _a : [];
            state[key] = this.cloneDeep(newPresences);
            // Keep previously-known presences that did not re-join, ahead of
            // the new ones.
            if (currentPresences.length > 0) {
                const joinedPresenceRefs = state[key].map((m) => m.presence_ref);
                const curPresences = currentPresences.filter((m) => joinedPresenceRefs.indexOf(m.presence_ref) < 0);
                state[key].unshift(...curPresences);
            }
            onJoin(key, currentPresences, newPresences);
        });
        this.map(leaves, (key, leftPresences) => {
            let currentPresences = state[key];
            if (!currentPresences)
                return;
            // Drop the presences whose refs appear in the leave diff.
            const presenceRefsToRemove = leftPresences.map((m) => m.presence_ref);
            currentPresences = currentPresences.filter((m) => presenceRefsToRemove.indexOf(m.presence_ref) < 0);
            state[key] = currentPresences;
            onLeave(key, currentPresences, leftPresences);
            if (currentPresences.length === 0)
                delete state[key];
        });
        return state;
    }
    /** @internal Iterates obj's own keys, calling func(key, value) for each. */
    static map(obj, func) {
        return Object.getOwnPropertyNames(obj).map((key) => func(key, obj[key]));
    }
    /**
     * Remove 'metas' key
     * Change 'phx_ref' to 'presence_ref'
     * Remove 'phx_ref' and 'phx_ref_prev'
     *
     * @example
     * // returns {
     *  abc123: [
     *    { presence_ref: '2', user_id: 1 },
     *    { presence_ref: '3', user_id: 2 }
     *  ]
     * }
     * RealtimePresence.transformState({
     *  abc123: {
     *    metas: [
     *      { phx_ref: '2', phx_ref_prev: '1' user_id: 1 },
     *      { phx_ref: '3', user_id: 2 }
     *    ]
     *  }
     * })
     *
     * @internal
     */
    static transformState(state) {
        state = this.cloneDeep(state);
        return Object.getOwnPropertyNames(state).reduce((newState, key) => {
            const presences = state[key];
            // Phoenix-shaped entries carry a 'metas' array; flatten and rename
            // phx_ref -> presence_ref. Already-flat entries pass through.
            if ('metas' in presences) {
                newState[key] = presences.metas.map((presence) => {
                    presence['presence_ref'] = presence['phx_ref'];
                    delete presence['phx_ref'];
                    delete presence['phx_ref_prev'];
                    return presence;
                });
            }
            else {
                newState[key] = presences;
            }
            return newState;
        }, {});
    }
    /** @internal JSON round-trip deep copy (drops functions/undefined). */
    static cloneDeep(obj) {
        return JSON.parse(JSON.stringify(obj));
    }
    /** @internal */
    onJoin(callback) {
        this.caller.onJoin = callback;
    }
    /** @internal */
    onLeave(callback) {
        this.caller.onLeave = callback;
    }
    /** @internal */
    onSync(callback) {
        this.caller.onSync = callback;
    }
    /** @internal True until a state snapshot for the current join has arrived. */
    inPendingSyncState() {
        return !this.joinRef || this.joinRef !== this.channel._joinRef();
    }
}
|
|
|
|
/** Postgres change events a client can listen for ('*' matches all). */
var REALTIME_POSTGRES_CHANGES_LISTEN_EVENT;
(function (listenEvent) {
    listenEvent["ALL"] = "*";
    listenEvent["INSERT"] = "INSERT";
    listenEvent["UPDATE"] = "UPDATE";
    listenEvent["DELETE"] = "DELETE";
})(REALTIME_POSTGRES_CHANGES_LISTEN_EVENT || (REALTIME_POSTGRES_CHANGES_LISTEN_EVENT = {}));
|
|
/** Categories of channel listeners supported by Realtime. */
var REALTIME_LISTEN_TYPES;
(function (listenTypes) {
    listenTypes["BROADCAST"] = "broadcast";
    listenTypes["PRESENCE"] = "presence";
    listenTypes["POSTGRES_CHANGES"] = "postgres_changes";
    listenTypes["SYSTEM"] = "system";
})(REALTIME_LISTEN_TYPES || (REALTIME_LISTEN_TYPES = {}));
|
|
/** Statuses reported to the subscribe() callback of a channel. */
var REALTIME_SUBSCRIBE_STATES;
(function (subscribeStates) {
    subscribeStates["SUBSCRIBED"] = "SUBSCRIBED";
    subscribeStates["TIMED_OUT"] = "TIMED_OUT";
    subscribeStates["CLOSED"] = "CLOSED";
    subscribeStates["CHANNEL_ERROR"] = "CHANNEL_ERROR";
})(REALTIME_SUBSCRIBE_STATES || (REALTIME_SUBSCRIBE_STATES = {}));
|
|
/** A channel is the basic building block of Realtime
 * and narrows the scope of data flow to subscribed clients.
 * You can think of a channel as a chatroom where participants are able to see who's online
 * and send and receive messages.
 */
class RealtimeChannel {
    constructor(
    /** Topic name can be any string. */
    topic, params = { config: {} }, socket) {
        this.topic = topic;
        this.params = params;
        this.socket = socket;
        // Event bindings registered via _on(), keyed by lower-cased event type.
        this.bindings = {};
        this.state = CHANNEL_STATES.closed;
        this.joinedOnce = false;
        // Pushes queued while the channel cannot send (not joined / disconnected).
        this.pushBuffer = [];
        // Topic without the "realtime:" prefix; used for the HTTP broadcast endpoint.
        this.subTopic = topic.replace(/^realtime:/i, '');
        // Merge caller-supplied config over the defaults.
        this.params.config = Object.assign({
            broadcast: { ack: false, self: false },
            presence: { key: '', enabled: false },
            private: false,
        }, params.config);
        this.timeout = this.socket.timeout;
        this.joinPush = new Push(this, CHANNEL_EVENTS.join, this.params, this.timeout);
        this.rejoinTimer = new Timer(() => this._rejoinUntilConnected(), this.socket.reconnectAfterMs);
        // On successful join: mark joined and flush the buffered pushes.
        this.joinPush.receive('ok', () => {
            this.state = CHANNEL_STATES.joined;
            this.rejoinTimer.reset();
            this.pushBuffer.forEach((pushEvent) => pushEvent.send());
            this.pushBuffer = [];
        });
        this._onClose(() => {
            this.rejoinTimer.reset();
            this.socket.log('channel', `close ${this.topic} ${this._joinRef()}`);
            this.state = CHANNEL_STATES.closed;
            this.socket._remove(this);
        });
        this._onError((reason) => {
            if (this._isLeaving() || this._isClosed()) {
                return;
            }
            this.socket.log('channel', `error ${this.topic}`, reason);
            this.state = CHANNEL_STATES.errored;
            this.rejoinTimer.scheduleTimeout();
        });
        this.joinPush.receive('timeout', () => {
            if (!this._isJoining()) {
                return;
            }
            this.socket.log('channel', `timeout ${this.topic}`, this.joinPush.timeout);
            this.state = CHANNEL_STATES.errored;
            this.rejoinTimer.scheduleTimeout();
        });
        this.joinPush.receive('error', (reason) => {
            if (this._isLeaving() || this._isClosed()) {
                return;
            }
            this.socket.log('channel', `error ${this.topic}`, reason);
            this.state = CHANNEL_STATES.errored;
            this.rejoinTimer.scheduleTimeout();
        });
        // Route server replies to the per-ref reply event so Push hooks fire.
        this._on(CHANNEL_EVENTS.reply, {}, (payload, ref) => {
            this._trigger(this._replyEventName(ref), payload);
        });
        this.presence = new RealtimePresence(this);
        this.broadcastEndpointURL = httpEndpointURL(this.socket.endPoint);
        this.private = this.params.config.private || false;
    }
    /** Subscribe registers your client with the server */
    subscribe(callback, timeout = this.timeout) {
        var _a, _b;
        if (!this.socket.isConnected()) {
            this.socket.connect();
        }
        if (this.state == CHANNEL_STATES.closed) {
            const { config: { broadcast, presence, private: isPrivate }, } = this.params;
            // Collect the client-side postgres_changes filters to send along with join.
            const postgres_changes = (_b = (_a = this.bindings.postgres_changes) === null || _a === void 0 ? void 0 : _a.map((r) => r.filter)) !== null && _b !== void 0 ? _b : [];
            const presence_enabled = !!this.bindings[REALTIME_LISTEN_TYPES.PRESENCE] &&
                this.bindings[REALTIME_LISTEN_TYPES.PRESENCE].length > 0;
            const accessTokenPayload = {};
            const config = {
                broadcast,
                presence: Object.assign(Object.assign({}, presence), { enabled: presence_enabled }),
                postgres_changes,
                private: isPrivate,
            };
            if (this.socket.accessTokenValue) {
                accessTokenPayload.access_token = this.socket.accessTokenValue;
            }
            this._onError((e) => callback === null || callback === void 0 ? void 0 : callback(REALTIME_SUBSCRIBE_STATES.CHANNEL_ERROR, e));
            this._onClose(() => callback === null || callback === void 0 ? void 0 : callback(REALTIME_SUBSCRIBE_STATES.CLOSED));
            this.updateJoinPayload(Object.assign({ config }, accessTokenPayload));
            this.joinedOnce = true;
            this._rejoin(timeout);
            this.joinPush
                .receive('ok', async ({ postgres_changes }) => {
                var _a;
                this.socket.setAuth();
                if (postgres_changes === undefined) {
                    callback === null || callback === void 0 ? void 0 : callback(REALTIME_SUBSCRIBE_STATES.SUBSCRIBED);
                    return;
                }
                else {
                    // Match the server-acknowledged filters with the client
                    // bindings, index by index, and attach the server-assigned ids.
                    const clientPostgresBindings = this.bindings.postgres_changes;
                    const bindingsLen = (_a = clientPostgresBindings === null || clientPostgresBindings === void 0 ? void 0 : clientPostgresBindings.length) !== null && _a !== void 0 ? _a : 0;
                    const newPostgresBindings = [];
                    for (let i = 0; i < bindingsLen; i++) {
                        const clientPostgresBinding = clientPostgresBindings[i];
                        const { filter: { event, schema, table, filter }, } = clientPostgresBinding;
                        const serverPostgresFilter = postgres_changes && postgres_changes[i];
                        if (serverPostgresFilter &&
                            serverPostgresFilter.event === event &&
                            serverPostgresFilter.schema === schema &&
                            serverPostgresFilter.table === table &&
                            serverPostgresFilter.filter === filter) {
                            newPostgresBindings.push(Object.assign(Object.assign({}, clientPostgresBinding), { id: serverPostgresFilter.id }));
                        }
                        else {
                            // Any mismatch invalidates the whole subscription.
                            this.unsubscribe();
                            this.state = CHANNEL_STATES.errored;
                            callback === null || callback === void 0 ? void 0 : callback(REALTIME_SUBSCRIBE_STATES.CHANNEL_ERROR, new Error('mismatch between server and client bindings for postgres changes'));
                            return;
                        }
                    }
                    this.bindings.postgres_changes = newPostgresBindings;
                    callback && callback(REALTIME_SUBSCRIBE_STATES.SUBSCRIBED);
                    return;
                }
            })
                .receive('error', (error) => {
                this.state = CHANNEL_STATES.errored;
                callback === null || callback === void 0 ? void 0 : callback(REALTIME_SUBSCRIBE_STATES.CHANNEL_ERROR, new Error(JSON.stringify(Object.values(error).join(', ') || 'error')));
                return;
            })
                .receive('timeout', () => {
                callback === null || callback === void 0 ? void 0 : callback(REALTIME_SUBSCRIBE_STATES.TIMED_OUT);
                return;
            });
        }
        return this;
    }
    /** Returns the current local presence state map. */
    presenceState() {
        return this.presence.state;
    }
    /** Announces this client's presence payload to the channel. */
    async track(payload, opts = {}) {
        return await this.send({
            type: 'presence',
            event: 'track',
            payload,
        }, opts.timeout || this.timeout);
    }
    /** Stops announcing this client's presence on the channel. */
    async untrack(opts = {}) {
        return await this.send({
            type: 'presence',
            event: 'untrack',
        }, opts);
    }
    /** Registers a listener; adding a presence listener on an already-joined
     * channel forces a resubscribe so the server enables presence. */
    on(type, filter, callback) {
        if (this.state === CHANNEL_STATES.joined &&
            type === REALTIME_LISTEN_TYPES.PRESENCE) {
            this.socket.log('channel', `resubscribe to ${this.topic} due to change in presence callbacks on joined channel`);
            this.unsubscribe().then(() => this.subscribe());
        }
        return this._on(type, filter, callback);
    }
    /**
     * Sends a message into the channel.
     *
     * Broadcasts fall back to the HTTP endpoint when the channel cannot push
     * over the socket. Resolves to 'ok', 'error' or 'timed out'.
     *
     * @param args Arguments to send to channel
     * @param args.type The type of event to send
     * @param args.event The name of the event being sent
     * @param args.payload Payload to be sent
     * @param opts Options to be used during the send process
     */
    async send(args, opts = {}) {
        var _a, _b;
        if (!this._canPush() && args.type === 'broadcast') {
            const { event, payload: endpoint_payload } = args;
            const authorization = this.socket.accessTokenValue
                ? `Bearer ${this.socket.accessTokenValue}`
                : '';
            const options = {
                method: 'POST',
                headers: {
                    Authorization: authorization,
                    apikey: this.socket.apiKey ? this.socket.apiKey : '',
                    'Content-Type': 'application/json',
                },
                body: JSON.stringify({
                    messages: [
                        {
                            topic: this.subTopic,
                            event,
                            payload: endpoint_payload,
                            private: this.private,
                        },
                    ],
                }),
            };
            try {
                const response = await this._fetchWithTimeout(this.broadcastEndpointURL, options, (_a = opts.timeout) !== null && _a !== void 0 ? _a : this.timeout);
                // Drain/cancel the body so the connection can be reused.
                await ((_b = response.body) === null || _b === void 0 ? void 0 : _b.cancel());
                return response.ok ? 'ok' : 'error';
            }
            catch (error) {
                if (error.name === 'AbortError') {
                    return 'timed out';
                }
                else {
                    return 'error';
                }
            }
        }
        else {
            return new Promise((resolve) => {
                var _a, _b, _c;
                const push = this._push(args.type, args, opts.timeout || this.timeout);
                // Without broadcast acks, resolve immediately after pushing.
                if (args.type === 'broadcast' && !((_c = (_b = (_a = this.params) === null || _a === void 0 ? void 0 : _a.config) === null || _b === void 0 ? void 0 : _b.broadcast) === null || _c === void 0 ? void 0 : _c.ack)) {
                    resolve('ok');
                }
                push.receive('ok', () => resolve('ok'));
                push.receive('error', () => resolve('error'));
                push.receive('timeout', () => resolve('timed out'));
            });
        }
    }
    /** Merges extra fields into the join push payload. */
    updateJoinPayload(payload) {
        this.joinPush.updatePayload(payload);
    }
    /**
     * Leaves the channel.
     *
     * Unsubscribes from server events, and instructs channel to terminate on server.
     * Triggers onClose() hooks.
     *
     * To receive leave acknowledgements, use the a `receive` hook to bind to the server ack, ie:
     * channel.unsubscribe().receive("ok", () => alert("left!") )
     */
    unsubscribe(timeout = this.timeout) {
        this.state = CHANNEL_STATES.leaving;
        const onClose = () => {
            this.socket.log('channel', `leave ${this.topic}`);
            this._trigger(CHANNEL_EVENTS.close, 'leave', this._joinRef());
        };
        this.joinPush.destroy();
        let leavePush = null;
        return new Promise((resolve) => {
            leavePush = new Push(this, CHANNEL_EVENTS.leave, {}, timeout);
            leavePush
                .receive('ok', () => {
                onClose();
                resolve('ok');
            })
                .receive('timeout', () => {
                onClose();
                resolve('timed out');
            })
                .receive('error', () => {
                resolve('error');
            });
            leavePush.send();
            // If the leave can't be pushed (socket down / not joined),
            // acknowledge it locally so the promise still settles.
            if (!this._canPush()) {
                leavePush.trigger('ok', {});
            }
        }).finally(() => {
            leavePush === null || leavePush === void 0 ? void 0 : leavePush.destroy();
        });
    }
    /**
     * Teardown the channel.
     *
     * Destroys and stops related timers.
     */
    teardown() {
        this.pushBuffer.forEach((push) => push.destroy());
        this.pushBuffer = [];
        this.rejoinTimer.reset();
        this.joinPush.destroy();
        this.state = CHANNEL_STATES.closed;
        this.bindings = {};
    }
    /** @internal fetch wrapper that aborts the request after `timeout` ms. */
    async _fetchWithTimeout(url, options, timeout) {
        const controller = new AbortController();
        const id = setTimeout(() => controller.abort(), timeout);
        const response = await this.socket.fetch(url, Object.assign(Object.assign({}, options), { signal: controller.signal }));
        clearTimeout(id);
        return response;
    }
    /** @internal Creates a Push and either sends it now or buffers it. */
    _push(event, payload, timeout = this.timeout) {
        if (!this.joinedOnce) {
            throw `tried to push '${event}' to '${this.topic}' before joining. Use channel.subscribe() before pushing events`;
        }
        let pushEvent = new Push(this, event, payload, timeout);
        if (this._canPush()) {
            pushEvent.send();
        }
        else {
            this._addToPushBuffer(pushEvent);
        }
        return pushEvent;
    }
    /** @internal Buffers a push for later send, dropping the oldest on overflow. */
    _addToPushBuffer(pushEvent) {
        pushEvent.startTimeout();
        this.pushBuffer.push(pushEvent);
        // Enforce buffer size limit
        if (this.pushBuffer.length > MAX_PUSH_BUFFER_SIZE) {
            const removedPush = this.pushBuffer.shift();
            if (removedPush) {
                removedPush.destroy();
                this.socket.log('channel', `discarded push due to buffer overflow: ${removedPush.event}`, removedPush.payload);
            }
        }
    }
    /**
     * Overridable message hook
     *
     * Receives all events for specialized message handling before dispatching to the channel callbacks.
     * Must return the payload, modified or unmodified.
     *
     * @internal
     */
    _onMessage(_event, payload, _ref) {
        return payload;
    }
    /** @internal Whether this channel owns the given topic. */
    _isMember(topic) {
        return this.topic === topic;
    }
    /** @internal Ref of the current join push. */
    _joinRef() {
        return this.joinPush.ref;
    }
    /** @internal Dispatches an event payload to the matching bindings. */
    _trigger(type, payload, ref) {
        var _a, _b;
        const typeLower = type.toLocaleLowerCase();
        const { close, error, leave, join } = CHANNEL_EVENTS;
        const events = [close, error, leave, join];
        // Ignore stale lifecycle events that belong to a previous join.
        if (ref && events.indexOf(typeLower) >= 0 && ref !== this._joinRef()) {
            return;
        }
        let handledPayload = this._onMessage(typeLower, payload, ref);
        if (payload && !handledPayload) {
            throw 'channel onMessage callbacks must return the payload, modified or unmodified';
        }
        if (['insert', 'update', 'delete'].includes(typeLower)) {
            // Raw postgres change events: match against postgres_changes bindings
            // by event name ('*' matches everything).
            (_a = this.bindings.postgres_changes) === null || _a === void 0 ? void 0 : _a.filter((bind) => {
                var _a, _b, _c;
                return (((_a = bind.filter) === null || _a === void 0 ? void 0 : _a.event) === '*' ||
                    ((_c = (_b = bind.filter) === null || _b === void 0 ? void 0 : _b.event) === null || _c === void 0 ? void 0 : _c.toLocaleLowerCase()) === typeLower);
            }).map((bind) => bind.callback(handledPayload, ref));
        }
        else {
            (_b = this.bindings[typeLower]) === null || _b === void 0 ? void 0 : _b.filter((bind) => {
                var _a, _b, _c, _d, _e, _f;
                if (['broadcast', 'presence', 'postgres_changes'].includes(typeLower)) {
                    // Server-acknowledged postgres bindings carry an `id`; match it
                    // against the ids the payload targets plus the event name.
                    if ('id' in bind) {
                        const bindId = bind.id;
                        const bindEvent = (_a = bind.filter) === null || _a === void 0 ? void 0 : _a.event;
                        return (bindId &&
                            ((_b = payload.ids) === null || _b === void 0 ? void 0 : _b.includes(bindId)) &&
                            (bindEvent === '*' ||
                                (bindEvent === null || bindEvent === void 0 ? void 0 : bindEvent.toLocaleLowerCase()) ===
                                    ((_c = payload.data) === null || _c === void 0 ? void 0 : _c.type.toLocaleLowerCase())));
                    }
                    else {
                        const bindEvent = (_e = (_d = bind === null || bind === void 0 ? void 0 : bind.filter) === null || _d === void 0 ? void 0 : _d.event) === null || _e === void 0 ? void 0 : _e.toLocaleLowerCase();
                        return (bindEvent === '*' ||
                            bindEvent === ((_f = payload === null || payload === void 0 ? void 0 : payload.event) === null || _f === void 0 ? void 0 : _f.toLocaleLowerCase()));
                    }
                }
                else {
                    return bind.type.toLocaleLowerCase() === typeLower;
                }
            }).map((bind) => {
                // Enrich postgres change payloads with decoded new/old records.
                if (typeof handledPayload === 'object' && 'ids' in handledPayload) {
                    const postgresChanges = handledPayload.data;
                    const { schema, table, commit_timestamp, type, errors } = postgresChanges;
                    const enrichedPayload = {
                        schema: schema,
                        table: table,
                        commit_timestamp: commit_timestamp,
                        eventType: type,
                        new: {},
                        old: {},
                        errors: errors,
                    };
                    handledPayload = Object.assign(Object.assign({}, enrichedPayload), this._getPayloadRecords(postgresChanges));
                }
                bind.callback(handledPayload, ref);
            });
        }
    }
    /** @internal */
    _isClosed() {
        return this.state === CHANNEL_STATES.closed;
    }
    /** @internal */
    _isJoined() {
        return this.state === CHANNEL_STATES.joined;
    }
    /** @internal */
    _isJoining() {
        return this.state === CHANNEL_STATES.joining;
    }
    /** @internal */
    _isLeaving() {
        return this.state === CHANNEL_STATES.leaving;
    }
    /** @internal Event name carrying the server's reply for a given ref. */
    _replyEventName(ref) {
        return `chan_reply_${ref}`;
    }
    /** @internal Registers a binding for a (type, filter) pair. */
    _on(type, filter, callback) {
        const typeLower = type.toLocaleLowerCase();
        const binding = {
            type: typeLower,
            filter: filter,
            callback: callback,
        };
        if (this.bindings[typeLower]) {
            this.bindings[typeLower].push(binding);
        }
        else {
            this.bindings[typeLower] = [binding];
        }
        return this;
    }
    /** @internal Removes bindings whose type and filter both match. */
    _off(type, filter) {
        const typeLower = type.toLocaleLowerCase();
        if (this.bindings[typeLower]) {
            this.bindings[typeLower] = this.bindings[typeLower].filter((bind) => {
                var _a;
                return !(((_a = bind.type) === null || _a === void 0 ? void 0 : _a.toLocaleLowerCase()) === typeLower &&
                    RealtimeChannel.isEqual(bind.filter, filter));
            });
        }
        return this;
    }
    /** @internal Shallow key/value equality of two plain objects. */
    static isEqual(obj1, obj2) {
        if (Object.keys(obj1).length !== Object.keys(obj2).length) {
            return false;
        }
        for (const k in obj1) {
            if (obj1[k] !== obj2[k]) {
                return false;
            }
        }
        return true;
    }
    /** @internal Keeps retrying the rejoin until the socket is connected. */
    _rejoinUntilConnected() {
        this.rejoinTimer.scheduleTimeout();
        if (this.socket.isConnected()) {
            this._rejoin();
        }
    }
    /**
     * Registers a callback that will be executed when the channel closes.
     *
     * @internal
     */
    _onClose(callback) {
        this._on(CHANNEL_EVENTS.close, {}, callback);
    }
    /**
     * Registers a callback that will be executed when the channel encounteres an error.
     *
     * @internal
     */
    _onError(callback) {
        this._on(CHANNEL_EVENTS.error, {}, (reason) => callback(reason));
    }
    /**
     * Returns `true` if the socket is connected and the channel has been joined.
     *
     * @internal
     */
    _canPush() {
        return this.socket.isConnected() && this._isJoined();
    }
    /** @internal Resends the join push unless the channel is leaving. */
    _rejoin(timeout = this.timeout) {
        if (this._isLeaving()) {
            return;
        }
        this.socket._leaveOpenTopic(this.topic);
        this.state = CHANNEL_STATES.joining;
        this.joinPush.resend(timeout);
    }
    /** @internal Decodes new/old records from a postgres change payload. */
    _getPayloadRecords(payload) {
        const records = {
            new: {},
            old: {},
        };
        if (payload.type === 'INSERT' || payload.type === 'UPDATE') {
            records.new = convertChangeData(payload.columns, payload.record);
        }
        if (payload.type === 'UPDATE' || payload.type === 'DELETE') {
            records.old = convertChangeData(payload.columns, payload.old_record);
        }
        return records;
    }
}
|
|
|
|
// Shared no-op used as the default `logger` and `heartbeatCallback`.
const noop$1 = () => { };
// Connection-related constants
const CONNECTION_TIMEOUTS = {
    // Default interval (ms) between client heartbeat messages.
    HEARTBEAT_INTERVAL: 25000,
    // Small delay (ms) applied before a reconnect attempt actually runs.
    RECONNECT_DELAY: 10,
    // Delay (ms) before scheduling a reconnect after a heartbeat-timeout close.
    HEARTBEAT_TIMEOUT_FALLBACK: 100,
};
// Stepped backoff (ms) for reconnect attempts 1-4; attempts beyond the
// table fall back to DEFAULT_RECONNECT_FALLBACK.
const RECONNECT_INTERVALS = [1000, 2000, 5000, 10000];
const DEFAULT_RECONNECT_FALLBACK = 10000;
// Inline Web Worker source: once it receives a "start" message it posts a
// "keepAlive" message on the requested interval (used for heartbeats when
// `worker: true` and no custom workerUrl is supplied).
const WORKER_SCRIPT = `
addEventListener("message", (e) => {
if (e.data.event === "start") {
setInterval(() => postMessage({ event: "keepAlive" }), e.data.interval);
}
});`;
|
|
/**
 * Phoenix-style realtime socket client: owns a single WebSocket connection,
 * multiplexes `RealtimeChannel`s over it, and manages heartbeats, auth token
 * propagation, and reconnection with stepped backoff.
 */
class RealtimeClient {
    /**
     * Initializes the Socket.
     *
     * @param endPoint The string WebSocket endpoint, ie, "ws://example.com/socket", "wss://example.com", "/socket" (inherited host & protocol)
     * @param httpEndpoint The string HTTP endpoint, ie, "https://example.com", "/" (inherited host & protocol)
     * @param options.transport The Websocket Transport, for example WebSocket. This can be a custom implementation
     * @param options.timeout The default timeout in milliseconds to trigger push timeouts.
     * @param options.params The optional params to pass when connecting.
     * @param options.headers Deprecated: headers cannot be set on websocket connections and this option will be removed in the future.
     * @param options.heartbeatIntervalMs The millisecond interval to send a heartbeat message.
     * @param options.logger The optional function for specialized logging, ie: logger: (kind, msg, data) => { console.log(`${kind}: ${msg}`, data) }
     * @param options.logLevel Sets the log level for Realtime
     * @param options.encode The function to encode outgoing messages. Defaults to JSON: (payload, callback) => callback(JSON.stringify(payload))
     * @param options.decode The function to decode incoming messages. Defaults to Serializer's decode.
     * @param options.reconnectAfterMs The optional function that returns the millisecond reconnect interval. Defaults to stepped backoff.
     * @param options.worker Use Web Worker to set a side flow. Defaults to false.
     * @param options.workerUrl The URL of the worker script. Defaults to https://realtime.supabase.com/worker.js that includes a heartbeat event call to keep the connection alive.
     */
    constructor(endPoint, options) {
        var _a;
        this.accessTokenValue = null;
        this.apiKey = null;
        this.channels = new Array();
        this.endPoint = '';
        this.httpEndpoint = '';
        /** @deprecated headers cannot be set on websocket connections */
        this.headers = {};
        this.params = {};
        this.timeout = DEFAULT_TIMEOUT;
        this.transport = null;
        this.heartbeatIntervalMs = CONNECTION_TIMEOUTS.HEARTBEAT_INTERVAL;
        this.heartbeatTimer = undefined;
        // Ref of the heartbeat awaiting a reply; non-null when one is outstanding
        // (used by sendHeartbeat to detect heartbeat timeouts).
        this.pendingHeartbeatRef = null;
        this.heartbeatCallback = noop$1;
        this.ref = 0;
        this.reconnectTimer = null;
        this.logger = noop$1;
        this.conn = null;
        // Push callbacks queued while disconnected; flushed on the next open.
        this.sendBuffer = [];
        this.serializer = new Serializer();
        this.stateChangeCallbacks = {
            open: [],
            close: [],
            error: [],
            message: [],
        };
        this.accessToken = null;
        this._connectionState = 'disconnected';
        this._wasManualDisconnect = false;
        this._authPromise = null;
        /**
         * Use either custom fetch, if provided, or default fetch to make HTTP requests
         *
         * @internal
         */
        this._resolveFetch = (customFetch) => {
            let _fetch;
            if (customFetch) {
                _fetch = customFetch;
            }
            else if (typeof fetch === 'undefined') {
                // Node.js environment without native fetch
                _fetch = (...args) => __vitePreload(async () => { const {default: fetch} = await Promise.resolve().then(() => browser);return { default: fetch }},true?void 0:void 0)
                    .then(({ default: fetch }) => fetch(...args))
                    .catch((error) => {
                    throw new Error(`Failed to load @supabase/node-fetch: ${error.message}. ` +
                        `This is required for HTTP requests in Node.js environments without native fetch.`);
                });
            }
            else {
                _fetch = fetch;
            }
            return (...args) => _fetch(...args);
        };
        // Validate required parameters
        if (!((_a = options === null || options === void 0 ? void 0 : options.params) === null || _a === void 0 ? void 0 : _a.apikey)) {
            throw new Error('API key is required to connect to Realtime');
        }
        this.apiKey = options.params.apikey;
        // Initialize endpoint URLs
        this.endPoint = `${endPoint}/${TRANSPORTS.websocket}`;
        this.httpEndpoint = httpEndpointURL(endPoint);
        this._initializeOptions(options);
        this._setupReconnectionTimer();
        this.fetch = this._resolveFetch(options === null || options === void 0 ? void 0 : options.fetch);
    }
    /**
     * Connects the socket, unless already connected.
     */
    connect() {
        // Skip if already connecting, disconnecting, or connected
        if (this.isConnecting() ||
            this.isDisconnecting() ||
            (this.conn !== null && this.isConnected())) {
            return;
        }
        this._setConnectionState('connecting');
        this._setAuthSafely('connect');
        // Establish WebSocket connection
        if (this.transport) {
            // Use custom transport if provided
            this.conn = new this.transport(this.endpointURL());
        }
        else {
            // Try to use native WebSocket
            try {
                this.conn = WebSocketFactory.createWebSocket(this.endpointURL());
            }
            catch (error) {
                this._setConnectionState('disconnected');
                const errorMessage = error.message;
                // Provide helpful error message based on environment
                if (errorMessage.includes('Node.js')) {
                    throw new Error(`${errorMessage}\n\n` +
                        'To use Realtime in Node.js, you need to provide a WebSocket implementation:\n\n' +
                        'Option 1: Use Node.js 22+ which has native WebSocket support\n' +
                        'Option 2: Install and provide the "ws" package:\n\n' +
                        '  npm install ws\n\n' +
                        '  import ws from "ws"\n' +
                        '  const client = new RealtimeClient(url, {\n' +
                        '    ...options,\n' +
                        '    transport: ws\n' +
                        '  })');
                }
                throw new Error(`WebSocket not available: ${errorMessage}`);
            }
        }
        this._setupConnectionHandlers();
    }
    /**
     * Returns the URL of the websocket.
     * @returns string The URL of the websocket.
     */
    endpointURL() {
        return this._appendParams(this.endPoint, Object.assign({}, this.params, { vsn: VSN }));
    }
    /**
     * Disconnects the socket.
     *
     * @param code A numeric status code to send on disconnect.
     * @param reason A custom reason for the disconnect.
     */
    disconnect(code, reason) {
        if (this.isDisconnecting()) {
            return;
        }
        // `true` marks this as a manual disconnect so no reconnect is scheduled.
        this._setConnectionState('disconnecting', true);
        if (this.conn) {
            // Setup fallback timer to prevent hanging in disconnecting state
            const fallbackTimer = setTimeout(() => {
                this._setConnectionState('disconnected');
            }, 100);
            this.conn.onclose = () => {
                clearTimeout(fallbackTimer);
                this._setConnectionState('disconnected');
            };
            // Close the WebSocket connection
            if (code) {
                this.conn.close(code, reason !== null && reason !== void 0 ? reason : '');
            }
            else {
                this.conn.close();
            }
            this._teardownConnection();
        }
        else {
            this._setConnectionState('disconnected');
        }
    }
    /**
     * Returns all created channels
     */
    getChannels() {
        return this.channels;
    }
    /**
     * Unsubscribes and removes a single channel
     * @param channel A RealtimeChannel instance
     */
    async removeChannel(channel) {
        // NOTE(review): removal from `this.channels` is presumably performed by
        // unsubscribe() via `_remove` — confirm against RealtimeChannel.
        const status = await channel.unsubscribe();
        if (this.channels.length === 0) {
            this.disconnect();
        }
        return status;
    }
    /**
     * Unsubscribes and removes all channels
     */
    async removeAllChannels() {
        const values_1 = await Promise.all(this.channels.map((channel) => channel.unsubscribe()));
        this.channels = [];
        this.disconnect();
        return values_1;
    }
    /**
     * Logs the message.
     *
     * For customized logging, `this.logger` can be overridden.
     */
    log(kind, msg, data) {
        this.logger(kind, msg, data);
    }
    /**
     * Returns the current state of the socket.
     */
    connectionState() {
        switch (this.conn && this.conn.readyState) {
            case SOCKET_STATES.connecting:
                return CONNECTION_STATE.Connecting;
            case SOCKET_STATES.open:
                return CONNECTION_STATE.Open;
            case SOCKET_STATES.closing:
                return CONNECTION_STATE.Closing;
            default:
                // Also covers `this.conn === null`.
                return CONNECTION_STATE.Closed;
        }
    }
    /**
     * Returns `true` if the connection is open.
     */
    isConnected() {
        return this.connectionState() === CONNECTION_STATE.Open;
    }
    /**
     * Returns `true` if the connection is currently connecting.
     */
    isConnecting() {
        return this._connectionState === 'connecting';
    }
    /**
     * Returns `true` if the connection is currently disconnecting.
     */
    isDisconnecting() {
        return this._connectionState === 'disconnecting';
    }
    /**
     * Returns the channel for `topic`, creating (and registering) a new
     * RealtimeChannel when none exists yet.
     */
    channel(topic, params = { config: {} }) {
        const realtimeTopic = `realtime:${topic}`;
        const exists = this.getChannels().find((c) => c.topic === realtimeTopic);
        if (!exists) {
            const chan = new RealtimeChannel(`realtime:${topic}`, params, this);
            this.channels.push(chan);
            return chan;
        }
        else {
            return exists;
        }
    }
    /**
     * Push out a message if the socket is connected.
     *
     * If the socket is not connected, the message gets enqueued within a local buffer, and sent out when a connection is next established.
     */
    push(data) {
        const { topic, event, payload, ref } = data;
        const callback = () => {
            this.encode(data, (result) => {
                var _a;
                (_a = this.conn) === null || _a === void 0 ? void 0 : _a.send(result);
            });
        };
        this.log('push', `${topic} ${event} (${ref})`, payload);
        if (this.isConnected()) {
            callback();
        }
        else {
            this.sendBuffer.push(callback);
        }
    }
    /**
     * Sets the JWT access token used for channel subscription authorization and Realtime RLS.
     *
     * If param is null it will use the `accessToken` callback function or the token set on the client.
     *
     * On callback used, it will set the value of the token internal to the client.
     *
     * @param token A JWT string to override the token set on the client.
     */
    async setAuth(token = null) {
        // Track the in-flight auth so _waitForAuthIfNeeded can await it.
        this._authPromise = this._performAuth(token);
        try {
            await this._authPromise;
        }
        finally {
            this._authPromise = null;
        }
    }
    /**
     * Sends a heartbeat message if the socket is connected.
     */
    async sendHeartbeat() {
        var _a;
        if (!this.isConnected()) {
            this.heartbeatCallback('disconnected');
            return;
        }
        // Handle heartbeat timeout and force reconnection if needed.
        // A still-pending ref means the previous heartbeat never got a reply.
        if (this.pendingHeartbeatRef) {
            this.pendingHeartbeatRef = null;
            this.log('transport', 'heartbeat timeout. Attempting to re-establish connection');
            this.heartbeatCallback('timeout');
            // Force reconnection after heartbeat timeout
            this._wasManualDisconnect = false;
            (_a = this.conn) === null || _a === void 0 ? void 0 : _a.close(WS_CLOSE_NORMAL, 'heartbeat timeout');
            setTimeout(() => {
                var _a;
                if (!this.isConnected()) {
                    (_a = this.reconnectTimer) === null || _a === void 0 ? void 0 : _a.scheduleTimeout();
                }
            }, CONNECTION_TIMEOUTS.HEARTBEAT_TIMEOUT_FALLBACK);
            return;
        }
        // Send heartbeat message to server
        this.pendingHeartbeatRef = this._makeRef();
        this.push({
            topic: 'phoenix',
            event: 'heartbeat',
            payload: {},
            ref: this.pendingHeartbeatRef,
        });
        this.heartbeatCallback('sent');
        // Also refresh auth opportunistically on each heartbeat tick.
        this._setAuthSafely('heartbeat');
    }
    /** Registers a callback invoked with each heartbeat status transition. */
    onHeartbeat(callback) {
        this.heartbeatCallback = callback;
    }
    /**
     * Flushes send buffer
     */
    flushSendBuffer() {
        if (this.isConnected() && this.sendBuffer.length > 0) {
            this.sendBuffer.forEach((callback) => callback());
            this.sendBuffer = [];
        }
    }
    /**
     * Return the next message ref, accounting for overflows
     *
     * @internal
     */
    _makeRef() {
        let newRef = this.ref + 1;
        // `newRef === this.ref` only when incrementing no longer changes the
        // value (Number precision saturation) — wrap back to 0 in that case.
        if (newRef === this.ref) {
            this.ref = 0;
        }
        else {
            this.ref = newRef;
        }
        return this.ref.toString();
    }
    /**
     * Unsubscribe from channels with the specified topic.
     *
     * @internal
     */
    _leaveOpenTopic(topic) {
        let dupChannel = this.channels.find((c) => c.topic === topic && (c._isJoined() || c._isJoining()));
        if (dupChannel) {
            this.log('transport', `leaving duplicate topic "${topic}"`);
            dupChannel.unsubscribe();
        }
    }
    /**
     * Removes a subscription from the socket.
     *
     * @param channel An open subscription.
     *
     * @internal
     */
    _remove(channel) {
        this.channels = this.channels.filter((c) => c.topic !== channel.topic);
    }
    /** @internal */
    _onConnMessage(rawMessage) {
        this.decode(rawMessage.data, (msg) => {
            // Handle heartbeat responses
            if (msg.topic === 'phoenix' && msg.event === 'phx_reply') {
                this.heartbeatCallback(msg.payload.status === 'ok' ? 'ok' : 'error');
            }
            // Handle pending heartbeat reference cleanup
            if (msg.ref && msg.ref === this.pendingHeartbeatRef) {
                this.pendingHeartbeatRef = null;
            }
            // Log incoming message
            const { topic, event, payload, ref } = msg;
            const refString = ref ? `(${ref})` : '';
            const status = payload.status || '';
            this.log('receive', `${status} ${topic} ${event} ${refString}`.trim(), payload);
            // Route message to appropriate channels
            this.channels
                .filter((channel) => channel._isMember(topic))
                .forEach((channel) => channel._trigger(event, payload, ref));
            this._triggerStateCallbacks('message', msg);
        });
    }
    /**
     * Clear specific timer
     * @internal
     */
    _clearTimer(timer) {
        var _a;
        if (timer === 'heartbeat' && this.heartbeatTimer) {
            clearInterval(this.heartbeatTimer);
            this.heartbeatTimer = undefined;
        }
        else if (timer === 'reconnect') {
            (_a = this.reconnectTimer) === null || _a === void 0 ? void 0 : _a.reset();
        }
    }
    /**
     * Clear all timers
     * @internal
     */
    _clearAllTimers() {
        this._clearTimer('heartbeat');
        this._clearTimer('reconnect');
    }
    /**
     * Setup connection handlers for WebSocket events
     * @internal
     */
    _setupConnectionHandlers() {
        if (!this.conn)
            return;
        // Set binary type if supported (browsers and most WebSocket implementations)
        if ('binaryType' in this.conn) {
            this.conn.binaryType = 'arraybuffer';
        }
        this.conn.onopen = () => this._onConnOpen();
        this.conn.onerror = (error) => this._onConnError(error);
        this.conn.onmessage = (event) => this._onConnMessage(event);
        this.conn.onclose = (event) => this._onConnClose(event);
    }
    /**
     * Teardown connection and cleanup resources
     * @internal
     */
    _teardownConnection() {
        if (this.conn) {
            this.conn.onopen = null;
            this.conn.onerror = null;
            this.conn.onmessage = null;
            this.conn.onclose = null;
            this.conn = null;
        }
        this._clearAllTimers();
        this.channels.forEach((channel) => channel.teardown());
    }
    /** @internal */
    _onConnOpen() {
        this._setConnectionState('connected');
        this.log('transport', `connected to ${this.endpointURL()}`);
        this.flushSendBuffer();
        this._clearTimer('reconnect');
        if (!this.worker) {
            this._startHeartbeat();
        }
        else {
            if (!this.workerRef) {
                this._startWorkerHeartbeat();
            }
        }
        this._triggerStateCallbacks('open');
    }
    /** @internal */
    _startHeartbeat() {
        this.heartbeatTimer && clearInterval(this.heartbeatTimer);
        this.heartbeatTimer = setInterval(() => this.sendHeartbeat(), this.heartbeatIntervalMs);
    }
    /** @internal */
    _startWorkerHeartbeat() {
        if (this.workerUrl) {
            // NOTE(review): "for from" in this log string looks like a typo;
            // left untouched since it is runtime output.
            this.log('worker', `starting worker for from ${this.workerUrl}`);
        }
        else {
            this.log('worker', `starting default worker`);
        }
        const objectUrl = this._workerObjectUrl(this.workerUrl);
        this.workerRef = new Worker(objectUrl);
        this.workerRef.onerror = (error) => {
            this.log('worker', 'worker error', error.message);
            this.workerRef.terminate();
        };
        this.workerRef.onmessage = (event) => {
            if (event.data.event === 'keepAlive') {
                this.sendHeartbeat();
            }
        };
        this.workerRef.postMessage({
            event: 'start',
            interval: this.heartbeatIntervalMs,
        });
    }
    /** @internal */
    _onConnClose(event) {
        var _a;
        this._setConnectionState('disconnected');
        this.log('transport', 'close', event);
        this._triggerChanError();
        this._clearTimer('heartbeat');
        // Only schedule reconnection if it wasn't a manual disconnect
        if (!this._wasManualDisconnect) {
            (_a = this.reconnectTimer) === null || _a === void 0 ? void 0 : _a.scheduleTimeout();
        }
        this._triggerStateCallbacks('close', event);
    }
    /** @internal */
    _onConnError(error) {
        this._setConnectionState('disconnected');
        this.log('transport', `${error}`);
        this._triggerChanError();
        this._triggerStateCallbacks('error', error);
    }
    /** @internal */
    _triggerChanError() {
        this.channels.forEach((channel) => channel._trigger(CHANNEL_EVENTS.error));
    }
    /**
     * Appends `params` to `url` as a query string, respecting any existing
     * query separator.
     * @internal
     */
    _appendParams(url, params) {
        if (Object.keys(params).length === 0) {
            return url;
        }
        const prefix = url.match(/\?/) ? '&' : '?';
        const query = new URLSearchParams(params);
        return `${url}${prefix}${query}`;
    }
    /**
     * Resolves the worker script URL: the caller-provided URL, or a blob URL
     * built from the inline WORKER_SCRIPT.
     * NOTE(review): the blob URL is never revoked via URL.revokeObjectURL —
     * acceptable for a long-lived singleton, but worth confirming.
     */
    _workerObjectUrl(url) {
        let result_url;
        if (url) {
            result_url = url;
        }
        else {
            const blob = new Blob([WORKER_SCRIPT], { type: 'application/javascript' });
            result_url = URL.createObjectURL(blob);
        }
        return result_url;
    }
    /**
     * Set connection state with proper state management
     * @internal
     */
    _setConnectionState(state, manual = false) {
        this._connectionState = state;
        if (state === 'connecting') {
            this._wasManualDisconnect = false;
        }
        else if (state === 'disconnecting') {
            this._wasManualDisconnect = manual;
        }
    }
    /**
     * Perform the actual auth operation
     * @internal
     */
    async _performAuth(token = null) {
        let tokenToSend;
        if (token) {
            tokenToSend = token;
        }
        else if (this.accessToken) {
            // Always call the accessToken callback to get fresh token
            tokenToSend = await this.accessToken();
        }
        else {
            tokenToSend = this.accessTokenValue;
        }
        // Loose != also treats null/undefined as equal — presumably intentional
        // so an "unset" token isn't re-broadcast; TODO confirm.
        if (this.accessTokenValue != tokenToSend) {
            this.accessTokenValue = tokenToSend;
            this.channels.forEach((channel) => {
                const payload = {
                    access_token: tokenToSend,
                    version: DEFAULT_VERSION,
                };
                tokenToSend && channel.updateJoinPayload(payload);
                if (channel.joinedOnce && channel._isJoined()) {
                    channel._push(CHANNEL_EVENTS.access_token, {
                        access_token: tokenToSend,
                    });
                }
            });
        }
    }
    /**
     * Wait for any in-flight auth operations to complete
     * @internal
     */
    async _waitForAuthIfNeeded() {
        if (this._authPromise) {
            await this._authPromise;
        }
    }
    /**
     * Safely call setAuth with standardized error handling
     * @internal
     */
    _setAuthSafely(context = 'general') {
        this.setAuth().catch((e) => {
            this.log('error', `error setting auth in ${context}`, e);
        });
    }
    /**
     * Trigger state change callbacks with proper error handling
     * @internal
     */
    _triggerStateCallbacks(event, data) {
        try {
            this.stateChangeCallbacks[event].forEach((callback) => {
                try {
                    callback(data);
                }
                catch (e) {
                    this.log('error', `error in ${event} callback`, e);
                }
            });
        }
        catch (e) {
            this.log('error', `error triggering ${event} callbacks`, e);
        }
    }
    /**
     * Setup reconnection timer with proper configuration
     * @internal
     */
    _setupReconnectionTimer() {
        this.reconnectTimer = new Timer(async () => {
            // Small delay lets any in-flight auth settle before reconnecting.
            setTimeout(async () => {
                await this._waitForAuthIfNeeded();
                if (!this.isConnected()) {
                    this.connect();
                }
            }, CONNECTION_TIMEOUTS.RECONNECT_DELAY);
        }, this.reconnectAfterMs);
    }
    /**
     * Initialize client options with defaults
     * @internal
     */
    _initializeOptions(options) {
        var _a, _b, _c, _d, _e, _f, _g, _h;
        // Set defaults
        this.transport = (_a = options === null || options === void 0 ? void 0 : options.transport) !== null && _a !== void 0 ? _a : null;
        this.timeout = (_b = options === null || options === void 0 ? void 0 : options.timeout) !== null && _b !== void 0 ? _b : DEFAULT_TIMEOUT;
        this.heartbeatIntervalMs =
            (_c = options === null || options === void 0 ? void 0 : options.heartbeatIntervalMs) !== null && _c !== void 0 ? _c : CONNECTION_TIMEOUTS.HEARTBEAT_INTERVAL;
        this.worker = (_d = options === null || options === void 0 ? void 0 : options.worker) !== null && _d !== void 0 ? _d : false;
        this.accessToken = (_e = options === null || options === void 0 ? void 0 : options.accessToken) !== null && _e !== void 0 ? _e : null;
        // Handle special cases
        if (options === null || options === void 0 ? void 0 : options.params)
            this.params = options.params;
        if (options === null || options === void 0 ? void 0 : options.logger)
            this.logger = options.logger;
        if ((options === null || options === void 0 ? void 0 : options.logLevel) || (options === null || options === void 0 ? void 0 : options.log_level)) {
            this.logLevel = options.logLevel || options.log_level;
            this.params = Object.assign(Object.assign({}, this.params), { log_level: this.logLevel });
        }
        // Set up functions with defaults
        this.reconnectAfterMs =
            (_f = options === null || options === void 0 ? void 0 : options.reconnectAfterMs) !== null && _f !== void 0 ? _f : ((tries) => {
                return RECONNECT_INTERVALS[tries - 1] || DEFAULT_RECONNECT_FALLBACK;
            });
        this.encode =
            (_g = options === null || options === void 0 ? void 0 : options.encode) !== null && _g !== void 0 ? _g : ((payload, callback) => {
                return callback(JSON.stringify(payload));
            });
        this.decode =
            (_h = options === null || options === void 0 ? void 0 : options.decode) !== null && _h !== void 0 ? _h : this.serializer.decode.bind(this.serializer);
        // Handle worker setup
        if (this.worker) {
            if (typeof window !== 'undefined' && !window.Worker) {
                throw new Error('Web Worker is not supported');
            }
            this.workerUrl = options === null || options === void 0 ? void 0 : options.workerUrl;
        }
    }
}
|
|
|
|
/**
 * Base class for all storage errors. Instances carry the
 * `__isStorageError` marker so `isStorageError` can recognize them even
 * across bundle/realm boundaries where `instanceof` would fail.
 */
class StorageError extends Error {
    /** @param message Human-readable error description. */
    constructor(message) {
        super(message);
        this.name = 'StorageError';
        this.__isStorageError = true;
    }
}
|
|
/**
 * Type guard: true when `error` is an object carrying the
 * `__isStorageError` marker set by the StorageError constructor.
 */
function isStorageError(error) {
    if (error === null || typeof error !== 'object') {
        return false;
    }
    return '__isStorageError' in error;
}
|
|
/**
 * Error produced from a storage API error response.
 */
class StorageApiError extends StorageError {
    /**
     * @param message Error message extracted from the response body.
     * @param status Numeric HTTP status of the response.
     * @param statusCode API-provided status code string.
     */
    constructor(message, status, statusCode) {
        super(message);
        this.name = 'StorageApiError';
        this.status = status;
        this.statusCode = statusCode;
    }
    /** Plain-object representation for JSON serialization / logging. */
    toJSON() {
        const { name, message, status, statusCode } = this;
        return { name, message, status, statusCode };
    }
}
|
|
/**
 * Wrapper for unexpected failures (network errors, unparsable responses);
 * keeps the underlying error available as `originalError`.
 */
class StorageUnknownError extends StorageError {
    /**
     * @param message Human-readable description.
     * @param originalError The underlying error or response that triggered this.
     */
    constructor(message, originalError) {
        super(message);
        this.name = 'StorageUnknownError';
        this.originalError = originalError;
    }
}
|
|
|
|
// TypeScript-emitted __awaiter helper (downleveled async/await): drives a
// generator whose yielded values are awaited, returning a Promise for the
// generator's final return value. Do not hand-edit — compiler output.
var __awaiter$6 = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
// Picks the fetch implementation to use: a caller-supplied fetch, the
// platform's native fetch, or (in Node without native fetch) a lazily
// imported @supabase/node-fetch via the bundler's preload helper.
// Returns a wrapper so late binding of `_fetch` is preserved.
const resolveFetch$2 = (customFetch) => {
    let _fetch;
    if (customFetch) {
        _fetch = customFetch;
    }
    else if (typeof fetch === 'undefined') {
        // Node.js environment without native fetch: load the ponyfill on first call.
        _fetch = (...args) => __vitePreload(async () => { const {default: fetch} = await Promise.resolve().then(() => browser);return { default: fetch }},true?void 0:void 0).then(({ default: fetch }) => fetch(...args));
    }
    else {
        _fetch = fetch;
    }
    return (...args) => _fetch(...args);
};
|
|
// Resolves the Response constructor: the global one when available,
// otherwise the ponyfill's Response (Node without native fetch). Used by
// handleError$1 to test `error instanceof Response`.
const resolveResponse = () => __awaiter$6(void 0, void 0, void 0, function* () {
    if (typeof Response === 'undefined') {
        // @ts-ignore
        return (yield __vitePreload(() => Promise.resolve().then(() => browser),true?void 0:void 0)).Response;
    }
    return Response;
});
|
|
/**
 * Recursively converts snake_case / kebab-case keys to camelCase in plain
 * objects, descending into arrays and nested objects. Primitives and
 * functions are returned untouched. Returns a new structure; the input is
 * not mutated.
 */
const recursiveToCamel = (item) => {
    if (Array.isArray(item)) {
        return item.map((el) => recursiveToCamel(el));
    }
    if (typeof item === 'function' || item !== Object(item)) {
        // Functions and primitives (including null/undefined) pass through.
        return item;
    }
    const converted = {};
    for (const [key, value] of Object.entries(item)) {
        // "_x" / "-x" becomes "X"; the separator is dropped.
        const camelKey = key.replace(/([-_][a-z])/gi, (c) => c.toUpperCase().replace(/[-_]/g, ''));
        converted[camelKey] = recursiveToCamel(value);
    }
    return converted;
};
|
|
/**
 * Determine if input is a plain object
 * An object is plain if it's created by either {}, new Object(), or Object.create(null),
 * and carries neither Symbol.toStringTag nor Symbol.iterator.
 * source: https://github.com/sindresorhus/is-plain-obj
 */
const isPlainObject$1 = (value) => {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    const proto = Object.getPrototypeOf(value);
    const prototypeOk = proto === null ||
        proto === Object.prototype ||
        Object.getPrototypeOf(proto) === null;
    if (!prototypeOk) {
        return false;
    }
    if (Symbol.toStringTag in value) {
        return false;
    }
    return !(Symbol.iterator in value);
};
|
|
|
|
// TypeScript-emitted __awaiter helper (downleveled async/await); duplicate
// of __awaiter$6 produced by bundling separate modules. Compiler output —
// do not hand-edit.
var __awaiter$5 = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
/**
 * Extracts the most specific message field available on an error-like
 * payload, falling back to its JSON serialization.
 */
const _getErrorMessage$1 = (err) => {
    const candidates = [err.msg, err.message, err.error_description, err.error];
    const firstTruthy = candidates.find((m) => Boolean(m));
    return firstTruthy || JSON.stringify(err);
};
|
|
// Normalizes a failed request into a StorageError and rejects with it:
// Response-like errors become StorageApiError (after parsing the JSON body,
// unless `noResolveJson`), everything else becomes StorageUnknownError.
const handleError$1 = (error, reject, options) => __awaiter$5(void 0, void 0, void 0, function* () {
    // Resolve the Response constructor lazily (ponyfill in fetch-less Node).
    const Res = yield resolveResponse();
    if (error instanceof Res && !(options === null || options === void 0 ? void 0 : options.noResolveJson)) {
        error
            .json()
            .then((err) => {
            const status = error.status || 500;
            const statusCode = (err === null || err === void 0 ? void 0 : err.statusCode) || status + '';
            reject(new StorageApiError(_getErrorMessage$1(err), status, statusCode));
        })
            .catch((err) => {
            // Body wasn't valid JSON — surface as an unknown error instead.
            reject(new StorageUnknownError(_getErrorMessage$1(err), err));
        });
    }
    else {
        reject(new StorageUnknownError(_getErrorMessage$1(error), error));
    }
});
|
|
/**
 * Builds the fetch init object for a storage request: plain-object bodies
 * are JSON-serialized (with a JSON Content-Type), other bodies are passed
 * through as-is; `parameters` entries are merged last and win.
 */
const _getRequestParams$1 = (method, options, parameters, body) => {
    const params = { method, headers: options?.headers || {} };
    // GET requests and bodiless requests need nothing further.
    if (method === 'GET' || !body) {
        return params;
    }
    if (isPlainObject$1(body)) {
        params.headers = { 'Content-Type': 'application/json', ...options?.headers };
        params.body = JSON.stringify(body);
    }
    else {
        params.body = body;
    }
    if (options?.duplex) {
        params.duplex = options.duplex;
    }
    return { ...params, ...parameters };
};
|
|
// Core request pipeline: performs the fetch, treats non-ok responses as
// errors (routed through handleError$1), and resolves with the parsed JSON
// body — or the raw Response when `options.noResolveJson` is set.
function _handleRequest$1(fetcher, method, url, options, parameters, body) {
    return __awaiter$5(this, void 0, void 0, function* () {
        return new Promise((resolve, reject) => {
            fetcher(url, _getRequestParams$1(method, options, parameters, body))
                .then((result) => {
                // Non-2xx: throw the Response so the catch below normalizes it.
                if (!result.ok)
                    throw result;
                if (options === null || options === void 0 ? void 0 : options.noResolveJson)
                    return result;
                return result.json();
            })
                .then((data) => resolve(data))
                .catch((error) => handleError$1(error, reject, options));
        });
    });
}
|
|
/** Performs a GET request; resolves with parsed JSON (or raw Response when `noResolveJson`). */
function get(fetcher, url, options, parameters) {
    return __awaiter$5(this, void 0, void 0, function* () {
        return _handleRequest$1(fetcher, 'GET', url, options, parameters);
    });
}
|
|
/** Performs a POST request with `body`; resolves with parsed JSON (or raw Response when `noResolveJson`). */
function post(fetcher, url, body, options, parameters) {
    return __awaiter$5(this, void 0, void 0, function* () {
        return _handleRequest$1(fetcher, 'POST', url, options, parameters, body);
    });
}
|
|
/** Performs a PUT request with `body`; resolves with parsed JSON (or raw Response when `noResolveJson`). */
function put(fetcher, url, body, options, parameters) {
    return __awaiter$5(this, void 0, void 0, function* () {
        return _handleRequest$1(fetcher, 'PUT', url, options, parameters, body);
    });
}
|
|
/** Performs a HEAD request; always resolves with the raw Response (JSON resolution is forced off). */
function head(fetcher, url, options, parameters) {
    return __awaiter$5(this, void 0, void 0, function* () {
        return _handleRequest$1(fetcher, 'HEAD', url, Object.assign(Object.assign({}, options), { noResolveJson: true }), parameters);
    });
}
|
|
/** Performs a DELETE request with `body`; resolves with parsed JSON (or raw Response when `noResolveJson`). */
function remove(fetcher, url, body, options, parameters) {
    return __awaiter$5(this, void 0, void 0, function* () {
        return _handleRequest$1(fetcher, 'DELETE', url, options, parameters, body);
    });
}
|
|
|
|
// TypeScript-emitted __awaiter helper (downleveled async/await); another
// per-module duplicate introduced by bundling. Compiler output — do not
// hand-edit.
var __awaiter$4 = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
// Defaults merged into StorageFileApi.list() options when the caller omits them.
const DEFAULT_SEARCH_OPTIONS = {
    limit: 100,
    offset: 0,
    sortBy: {
        column: 'name',
        order: 'asc',
    },
};
// Defaults merged into upload options (see uploadOrUpdate). `cacheControl`
// is the max-age value in seconds used for the cache-control header.
const DEFAULT_FILE_OPTIONS = {
    cacheControl: '3600',
    contentType: 'text/plain;charset=UTF-8',
    upsert: false,
};
|
|
/**
 * Client for object-level operations (upload, download, move, copy, list,
 * signed URLs) scoped to a single Storage bucket.
 */
class StorageFileApi {
    constructor(url, headers = {}, bucketId, fetch) {
        this.url = url;
        this.headers = headers;
        this.bucketId = bucketId;
        this.fetch = resolveFetch$2(fetch);
    }
    /**
     * Uploads a file to an existing bucket or replaces an existing file at the specified path with a new one.
     *
     * @param method HTTP method.
     * @param path The relative file path. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.
     * @param fileBody The body of the file to be stored in the bucket.
     */
    uploadOrUpdate(method, path, fileBody, fileOptions) {
        return __awaiter$4(this, void 0, void 0, function* () {
            try {
                let body;
                const options = Object.assign(Object.assign({}, DEFAULT_FILE_OPTIONS), fileOptions);
                // x-upsert only applies to uploads (POST); updates (PUT) replace unconditionally.
                let headers = Object.assign(Object.assign({}, this.headers), (method === 'POST' && { 'x-upsert': String(options.upsert) }));
                const metadata = options.metadata;
                if (typeof Blob !== 'undefined' && fileBody instanceof Blob) {
                    // Blob bodies are sent as multipart form data; the empty-named
                    // field carries the file contents.
                    body = new FormData();
                    body.append('cacheControl', options.cacheControl);
                    if (metadata) {
                        body.append('metadata', this.encodeMetadata(metadata));
                    }
                    body.append('', fileBody);
                }
                else if (typeof FormData !== 'undefined' && fileBody instanceof FormData) {
                    // Caller-supplied FormData: augment it in place.
                    body = fileBody;
                    body.append('cacheControl', options.cacheControl);
                    if (metadata) {
                        body.append('metadata', this.encodeMetadata(metadata));
                    }
                }
                else {
                    // Any other body (string, buffer, stream, ...) is passed through
                    // and described via headers rather than form fields.
                    body = fileBody;
                    headers['cache-control'] = `max-age=${options.cacheControl}`;
                    headers['content-type'] = options.contentType;
                    if (metadata) {
                        headers['x-metadata'] = this.toBase64(this.encodeMetadata(metadata));
                    }
                }
                if (fileOptions === null || fileOptions === void 0 ? void 0 : fileOptions.headers) {
                    headers = Object.assign(Object.assign({}, headers), fileOptions.headers);
                }
                const cleanPath = this._removeEmptyFolders(path);
                const _path = this._getFinalPath(cleanPath);
                const data = yield (method == 'PUT' ? put : post)(this.fetch, `${this.url}/object/${_path}`, body, Object.assign({ headers }, ((options === null || options === void 0 ? void 0 : options.duplex) ? { duplex: options.duplex } : {})));
                return {
                    data: { path: cleanPath, id: data.Id, fullPath: data.Key },
                    error: null,
                };
            }
            catch (error) {
                // Storage errors are returned in-band; anything else propagates.
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Uploads a file to an existing bucket.
     *
     * @param path The file path, including the file name. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.
     * @param fileBody The body of the file to be stored in the bucket.
     */
    upload(path, fileBody, fileOptions) {
        return __awaiter$4(this, void 0, void 0, function* () {
            return this.uploadOrUpdate('POST', path, fileBody, fileOptions);
        });
    }
    /**
     * Upload a file with a token generated from `createSignedUploadUrl`.
     * @param path The file path, including the file name. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.
     * @param token The token generated from `createSignedUploadUrl`
     * @param fileBody The body of the file to be stored in the bucket.
     */
    uploadToSignedUrl(path, token, fileBody, fileOptions) {
        return __awaiter$4(this, void 0, void 0, function* () {
            const cleanPath = this._removeEmptyFolders(path);
            const _path = this._getFinalPath(cleanPath);
            const url = new URL(this.url + `/object/upload/sign/${_path}`);
            url.searchParams.set('token', token);
            try {
                let body;
                const options = Object.assign({ upsert: DEFAULT_FILE_OPTIONS.upsert }, fileOptions);
                const headers = Object.assign(Object.assign({}, this.headers), { 'x-upsert': String(options.upsert) });
                // Same body-shaping rules as uploadOrUpdate, minus metadata support.
                if (typeof Blob !== 'undefined' && fileBody instanceof Blob) {
                    body = new FormData();
                    body.append('cacheControl', options.cacheControl);
                    body.append('', fileBody);
                }
                else if (typeof FormData !== 'undefined' && fileBody instanceof FormData) {
                    body = fileBody;
                    body.append('cacheControl', options.cacheControl);
                }
                else {
                    body = fileBody;
                    headers['cache-control'] = `max-age=${options.cacheControl}`;
                    headers['content-type'] = options.contentType;
                }
                const data = yield put(this.fetch, url.toString(), body, { headers });
                return {
                    data: { path: cleanPath, fullPath: data.Key },
                    error: null,
                };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Creates a signed upload URL.
     * Signed upload URLs can be used to upload files to the bucket without further authentication.
     * They are valid for 2 hours.
     * @param path The file path, including the current file name. For example `folder/image.png`.
     * @param options.upsert If set to true, allows the file to be overwritten if it already exists.
     */
    createSignedUploadUrl(path, options) {
        return __awaiter$4(this, void 0, void 0, function* () {
            try {
                let _path = this._getFinalPath(path);
                const headers = Object.assign({}, this.headers);
                if (options === null || options === void 0 ? void 0 : options.upsert) {
                    headers['x-upsert'] = 'true';
                }
                const data = yield post(this.fetch, `${this.url}/object/upload/sign/${_path}`, {}, { headers });
                // The API returns a relative URL carrying the signed token as a query param.
                const url = new URL(this.url + data.url);
                const token = url.searchParams.get('token');
                if (!token) {
                    throw new StorageError('No token returned by API');
                }
                return { data: { signedUrl: url.toString(), path, token }, error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Replaces an existing file at the specified path with a new one.
     *
     * @param path The relative file path. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to update.
     * @param fileBody The body of the file to be stored in the bucket.
     */
    update(path, fileBody, fileOptions) {
        return __awaiter$4(this, void 0, void 0, function* () {
            return this.uploadOrUpdate('PUT', path, fileBody, fileOptions);
        });
    }
    /**
     * Moves an existing file to a new path in the same bucket.
     *
     * @param fromPath The original file path, including the current file name. For example `folder/image.png`.
     * @param toPath The new file path, including the new file name. For example `folder/image-new.png`.
     * @param options The destination options.
     */
    move(fromPath, toPath, options) {
        return __awaiter$4(this, void 0, void 0, function* () {
            try {
                const data = yield post(this.fetch, `${this.url}/object/move`, {
                    bucketId: this.bucketId,
                    sourceKey: fromPath,
                    destinationKey: toPath,
                    destinationBucket: options === null || options === void 0 ? void 0 : options.destinationBucket,
                }, { headers: this.headers });
                return { data, error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Copies an existing file to a new path in the same bucket.
     *
     * @param fromPath The original file path, including the current file name. For example `folder/image.png`.
     * @param toPath The new file path, including the new file name. For example `folder/image-copy.png`.
     * @param options The destination options.
     */
    copy(fromPath, toPath, options) {
        return __awaiter$4(this, void 0, void 0, function* () {
            try {
                const data = yield post(this.fetch, `${this.url}/object/copy`, {
                    bucketId: this.bucketId,
                    sourceKey: fromPath,
                    destinationKey: toPath,
                    destinationBucket: options === null || options === void 0 ? void 0 : options.destinationBucket,
                }, { headers: this.headers });
                return { data: { path: data.Key }, error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Creates a signed URL. Use a signed URL to share a file for a fixed amount of time.
     *
     * @param path The file path, including the current file name. For example `folder/image.png`.
     * @param expiresIn The number of seconds until the signed URL expires. For example, `60` for a URL which is valid for one minute.
     * @param options.download triggers the file as a download if set to true. Set this parameter as the name of the file if you want to trigger the download with a different filename.
     * @param options.transform Transform the asset before serving it to the client.
     */
    createSignedUrl(path, expiresIn, options) {
        return __awaiter$4(this, void 0, void 0, function* () {
            try {
                let _path = this._getFinalPath(path);
                let data = yield post(this.fetch, `${this.url}/object/sign/${_path}`, Object.assign({ expiresIn }, ((options === null || options === void 0 ? void 0 : options.transform) ? { transform: options.transform } : {})), { headers: this.headers });
                // `&` (not `?`) because the signed URL already carries a token query param.
                const downloadQueryParam = (options === null || options === void 0 ? void 0 : options.download)
                    ? `&download=${options.download === true ? '' : options.download}`
                    : '';
                const signedUrl = encodeURI(`${this.url}${data.signedURL}${downloadQueryParam}`);
                data = { signedUrl };
                return { data, error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Creates multiple signed URLs. Use a signed URL to share a file for a fixed amount of time.
     *
     * @param paths The file paths to be downloaded, including the current file names. For example `['folder/image.png', 'folder2/image2.png']`.
     * @param expiresIn The number of seconds until the signed URLs expire. For example, `60` for URLs which are valid for one minute.
     * @param options.download triggers the file as a download if set to true. Set this parameter as the name of the file if you want to trigger the download with a different filename.
     */
    createSignedUrls(paths, expiresIn, options) {
        return __awaiter$4(this, void 0, void 0, function* () {
            try {
                const data = yield post(this.fetch, `${this.url}/object/sign/${this.bucketId}`, { expiresIn, paths }, { headers: this.headers });
                const downloadQueryParam = (options === null || options === void 0 ? void 0 : options.download)
                    ? `&download=${options.download === true ? '' : options.download}`
                    : '';
                return {
                    // Per-path entries may come back without a signedURL; map those to null.
                    data: data.map((datum) => (Object.assign(Object.assign({}, datum), { signedUrl: datum.signedURL
                            ? encodeURI(`${this.url}${datum.signedURL}${downloadQueryParam}`)
                            : null }))),
                    error: null,
                };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Downloads a file from a private bucket. For public buckets, make a request to the URL returned from `getPublicUrl` instead.
     *
     * @param path The full path and file name of the file to be downloaded. For example `folder/image.png`.
     * @param options.transform Transform the asset before serving it to the client.
     */
    download(path, options) {
        return __awaiter$4(this, void 0, void 0, function* () {
            // Transformed downloads go through the image-render endpoint instead
            // of the plain object endpoint.
            const wantsTransformation = typeof (options === null || options === void 0 ? void 0 : options.transform) !== 'undefined';
            const renderPath = wantsTransformation ? 'render/image/authenticated' : 'object';
            const transformationQuery = this.transformOptsToQueryString((options === null || options === void 0 ? void 0 : options.transform) || {});
            const queryString = transformationQuery ? `?${transformationQuery}` : '';
            try {
                const _path = this._getFinalPath(path);
                const res = yield get(this.fetch, `${this.url}/${renderPath}/${_path}${queryString}`, {
                    headers: this.headers,
                    noResolveJson: true,
                });
                const data = yield res.blob();
                return { data, error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Retrieves the details of an existing file.
     * @param path
     */
    info(path) {
        return __awaiter$4(this, void 0, void 0, function* () {
            const _path = this._getFinalPath(path);
            try {
                const data = yield get(this.fetch, `${this.url}/object/info/${_path}`, {
                    headers: this.headers,
                });
                // API responds in snake_case; normalise keys for JS consumers.
                return { data: recursiveToCamel(data), error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Checks the existence of a file.
     * @param path
     */
    exists(path) {
        return __awaiter$4(this, void 0, void 0, function* () {
            const _path = this._getFinalPath(path);
            try {
                yield head(this.fetch, `${this.url}/object/${_path}`, {
                    headers: this.headers,
                });
                return { data: true, error: null };
            }
            catch (error) {
                // 400/404 mean "does not exist" and are reported in-band; any other
                // failure (auth, network, ...) is rethrown.
                if (isStorageError(error) && error instanceof StorageUnknownError) {
                    const originalError = error.originalError;
                    if ([400, 404].includes(originalError === null || originalError === void 0 ? void 0 : originalError.status)) {
                        return { data: false, error };
                    }
                }
                throw error;
            }
        });
    }
    /**
     * A simple convenience function to get the URL for an asset in a public bucket. If you do not want to use this function, you can construct the public URL by concatenating the bucket URL with the path to the asset.
     * This function does not verify if the bucket is public. If a public URL is created for a bucket which is not public, you will not be able to download the asset.
     *
     * @param path The path and name of the file to generate the public URL for. For example `folder/image.png`.
     * @param options.download Triggers the file as a download if set to true. Set this parameter as the name of the file if you want to trigger the download with a different filename.
     * @param options.transform Transform the asset before serving it to the client.
     */
    getPublicUrl(path, options) {
        const _path = this._getFinalPath(path);
        const _queryString = [];
        const downloadQueryParam = (options === null || options === void 0 ? void 0 : options.download)
            ? `download=${options.download === true ? '' : options.download}`
            : '';
        if (downloadQueryParam !== '') {
            _queryString.push(downloadQueryParam);
        }
        // Transform options route the URL through the public image-render endpoint.
        const wantsTransformation = typeof (options === null || options === void 0 ? void 0 : options.transform) !== 'undefined';
        const renderPath = wantsTransformation ? 'render/image' : 'object';
        const transformationQuery = this.transformOptsToQueryString((options === null || options === void 0 ? void 0 : options.transform) || {});
        if (transformationQuery !== '') {
            _queryString.push(transformationQuery);
        }
        let queryString = _queryString.join('&');
        if (queryString !== '') {
            queryString = `?${queryString}`;
        }
        return {
            data: { publicUrl: encodeURI(`${this.url}/${renderPath}/public/${_path}${queryString}`) },
        };
    }
    /**
     * Deletes files within the same bucket
     *
     * @param paths An array of files to delete, including the path and file name. For example [`'folder/image.png'`].
     */
    remove(paths) {
        return __awaiter$4(this, void 0, void 0, function* () {
            try {
                const data = yield remove(this.fetch, `${this.url}/object/${this.bucketId}`, { prefixes: paths }, { headers: this.headers });
                return { data, error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Get file metadata
     * @param id the file id to retrieve metadata
     */
    // async getMetadata(
    //   id: string
    // ): Promise<
    //   | {
    //       data: Metadata
    //       error: null
    //     }
    //   | {
    //       data: null
    //       error: StorageError
    //     }
    // > {
    //   try {
    //     const data = await get(this.fetch, `${this.url}/metadata/${id}`, { headers: this.headers })
    //     return { data, error: null }
    //   } catch (error) {
    //     if (isStorageError(error)) {
    //       return { data: null, error }
    //     }
    //     throw error
    //   }
    // }
    /**
     * Update file metadata
     * @param id the file id to update metadata
     * @param meta the new file metadata
     */
    // async updateMetadata(
    //   id: string,
    //   meta: Metadata
    // ): Promise<
    //   | {
    //       data: Metadata
    //       error: null
    //     }
    //   | {
    //       data: null
    //       error: StorageError
    //     }
    // > {
    //   try {
    //     const data = await post(
    //       this.fetch,
    //       `${this.url}/metadata/${id}`,
    //       { ...meta },
    //       { headers: this.headers }
    //     )
    //     return { data, error: null }
    //   } catch (error) {
    //     if (isStorageError(error)) {
    //       return { data: null, error }
    //     }
    //     throw error
    //   }
    // }
    /**
     * Lists all the files within a bucket.
     * @param path The folder path.
     * @param options Search options including limit (defaults to 100), offset, sortBy, and search
     */
    list(path, options, parameters) {
        return __awaiter$4(this, void 0, void 0, function* () {
            try {
                const body = Object.assign(Object.assign(Object.assign({}, DEFAULT_SEARCH_OPTIONS), options), { prefix: path || '' });
                const data = yield post(this.fetch, `${this.url}/object/list/${this.bucketId}`, body, { headers: this.headers }, parameters);
                return { data, error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * @experimental this method signature might change in the future
     * @param options search options
     * @param parameters
     */
    listV2(options, parameters) {
        return __awaiter$4(this, void 0, void 0, function* () {
            try {
                const body = Object.assign({}, options);
                const data = yield post(this.fetch, `${this.url}/object/list-v2/${this.bucketId}`, body, { headers: this.headers }, parameters);
                return { data, error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    // Serialises user-supplied metadata for transmission (JSON string).
    encodeMetadata(metadata) {
        return JSON.stringify(metadata);
    }
    // Base64-encode a string, using Buffer under Node and btoa in browsers.
    toBase64(data) {
        if (typeof Buffer !== 'undefined') {
            return Buffer.from(data).toString('base64');
        }
        return btoa(data);
    }
    // Prefix the object path with the bucket id, trimming leading slashes.
    _getFinalPath(path) {
        return `${this.bucketId}/${path.replace(/^\/+/, '')}`;
    }
    // Strip leading/trailing slashes and collapse duplicate slashes.
    _removeEmptyFolders(path) {
        return path.replace(/^\/|\/$/g, '').replace(/\/+/g, '/');
    }
    // Build the image-transform query string from the supported option keys.
    transformOptsToQueryString(transform) {
        const params = [];
        if (transform.width) {
            params.push(`width=${transform.width}`);
        }
        if (transform.height) {
            params.push(`height=${transform.height}`);
        }
        if (transform.resize) {
            params.push(`resize=${transform.resize}`);
        }
        if (transform.format) {
            params.push(`format=${transform.format}`);
        }
        if (transform.quality) {
            params.push(`quality=${transform.quality}`);
        }
        return params.join('&');
    }
}
|
|
|
|
// generated by genversion
const version$2 = '2.11.0';

// Identifies the storage-js client version on every Storage API request.
const DEFAULT_HEADERS$2 = { 'X-Client-Info': `storage-js/${version$2}` };
|
|
|
|
// TypeScript `__awaiter` helper (emitted by tsc): drives a generator function
// as if it were an async function, adopting each yielded value into a promise
// of `P` before resuming. Duplicated per bundled sub-package.
var __awaiter$3 = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-promise yields so every value can be awaited uniformly.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator until it is done, then resolve with its return value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
/**
 * Client for bucket-level Storage operations: list, fetch, create, update,
 * empty and delete buckets. File operations live on StorageFileApi.
 */
class StorageBucketApi {
    constructor(url, headers = {}, fetch, opts) {
        const baseUrl = new URL(url);
        // if legacy uri is used, replace with new storage host (disables request buffering to allow > 50GB uploads)
        // "project-ref.supabase.co" becomes "project-ref.storage.supabase.co"
        if (opts === null || opts === void 0 ? void 0 : opts.useNewHostname) {
            const isSupabaseHost = /supabase\.(co|in|red)$/.test(baseUrl.hostname);
            if (isSupabaseHost && !baseUrl.hostname.includes('storage.supabase.')) {
                baseUrl.hostname = baseUrl.hostname.replace('supabase.', 'storage.supabase.');
            }
        }
        this.url = baseUrl.href;
        this.headers = Object.assign(Object.assign({}, DEFAULT_HEADERS$2), headers);
        this.fetch = resolveFetch$2(fetch);
    }
    /**
     * Retrieves the details of all Storage buckets within an existing project.
     */
    listBuckets() {
        return __awaiter$3(this, void 0, void 0, function* () {
            try {
                const data = yield get(this.fetch, `${this.url}/bucket`, { headers: this.headers });
                return { data, error: null };
            }
            catch (error) {
                // Storage errors are returned in-band; anything else propagates.
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Retrieves the details of an existing Storage bucket.
     *
     * @param id The unique identifier of the bucket you would like to retrieve.
     */
    getBucket(id) {
        return __awaiter$3(this, void 0, void 0, function* () {
            try {
                const data = yield get(this.fetch, `${this.url}/bucket/${id}`, { headers: this.headers });
                return { data, error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Creates a new Storage bucket
     *
     * @param id A unique identifier for the bucket you are creating.
     * @param options.public The visibility of the bucket. Public buckets don't require an authorization token to download objects, but still require a valid token for all other operations. By default, buckets are private.
     * @param options.fileSizeLimit specifies the max file size in bytes that can be uploaded to this bucket.
     * The global file size limit takes precedence over this value.
     * The default value is null, which doesn't set a per bucket file size limit.
     * @param options.allowedMimeTypes specifies the allowed mime types that this bucket can accept during upload.
     * The default value is null, which allows files with all mime types to be uploaded.
     * Each mime type specified can be a wildcard, e.g. image/*, or a specific mime type, e.g. image/png.
     * @returns newly created bucket id
     * @param options.type (private-beta) specifies the bucket type. see `BucketType` for more details.
     *   - default bucket type is `STANDARD`
     */
    createBucket(id, options = {
        public: false,
    }) {
        return __awaiter$3(this, void 0, void 0, function* () {
            try {
                const data = yield post(this.fetch, `${this.url}/bucket`, {
                    id,
                    name: id,
                    type: options.type,
                    public: options.public,
                    file_size_limit: options.fileSizeLimit,
                    allowed_mime_types: options.allowedMimeTypes,
                }, { headers: this.headers });
                return { data, error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Updates a Storage bucket
     *
     * @param id A unique identifier for the bucket you are updating.
     * @param options.public The visibility of the bucket. Public buckets don't require an authorization token to download objects, but still require a valid token for all other operations.
     * @param options.fileSizeLimit specifies the max file size in bytes that can be uploaded to this bucket.
     * The global file size limit takes precedence over this value.
     * The default value is null, which doesn't set a per bucket file size limit.
     * @param options.allowedMimeTypes specifies the allowed mime types that this bucket can accept during upload.
     * The default value is null, which allows files with all mime types to be uploaded.
     * Each mime type specified can be a wildcard, e.g. image/*, or a specific mime type, e.g. image/png.
     */
    updateBucket(id, options) {
        return __awaiter$3(this, void 0, void 0, function* () {
            try {
                const data = yield put(this.fetch, `${this.url}/bucket/${id}`, {
                    id,
                    name: id,
                    public: options.public,
                    file_size_limit: options.fileSizeLimit,
                    allowed_mime_types: options.allowedMimeTypes,
                }, { headers: this.headers });
                return { data, error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Removes all objects inside a single bucket.
     *
     * @param id The unique identifier of the bucket you would like to empty.
     */
    emptyBucket(id) {
        return __awaiter$3(this, void 0, void 0, function* () {
            try {
                const data = yield post(this.fetch, `${this.url}/bucket/${id}/empty`, {}, { headers: this.headers });
                return { data, error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
    /**
     * Deletes an existing bucket. A bucket can't be deleted with existing objects inside it.
     * You must first `empty()` the bucket.
     *
     * @param id The unique identifier of the bucket you would like to delete.
     */
    deleteBucket(id) {
        return __awaiter$3(this, void 0, void 0, function* () {
            try {
                const data = yield remove(this.fetch, `${this.url}/bucket/${id}`, {}, { headers: this.headers });
                return { data, error: null };
            }
            catch (error) {
                if (isStorageError(error)) {
                    return { data: null, error };
                }
                throw error;
            }
        });
    }
}
|
|
|
|
/**
 * Entry point for the Storage API: bucket management via the inherited
 * StorageBucketApi methods, and per-bucket file operations via `from()`.
 */
class StorageClient extends StorageBucketApi {
    constructor(url, headers = {}, fetch, opts) {
        super(url, headers, fetch, opts);
    }
    /**
     * Perform file operation in a bucket.
     *
     * @param id The bucket id to operate on.
     */
    from(id) {
        // Each call returns a fresh StorageFileApi bound to the bucket id.
        return new StorageFileApi(this.url, this.headers, id, this.fetch);
    }
}
|
|
|
|
const version$1 = '2.56.0';

// Detect the JavaScript runtime so the X-Client-Info header can report it.
let JS_ENV = '';
// @ts-ignore
if (typeof Deno !== 'undefined') {
    JS_ENV = 'deno';
}
else if (typeof document !== 'undefined') {
    JS_ENV = 'web';
}
else if (typeof navigator !== 'undefined' && navigator.product === 'ReactNative') {
    JS_ENV = 'react-native';
}
else {
    JS_ENV = 'node';
}
const DEFAULT_HEADERS$1 = { 'X-Client-Info': `supabase-js-${JS_ENV}/${version$1}` };
// Defaults merged into client options by applySettingDefaults below.
const DEFAULT_GLOBAL_OPTIONS = {
    headers: DEFAULT_HEADERS$1,
};
const DEFAULT_DB_OPTIONS = {
    schema: 'public',
};
const DEFAULT_AUTH_OPTIONS = {
    autoRefreshToken: true,
    persistSession: true,
    detectSessionInUrl: true,
    flowType: 'implicit',
};
const DEFAULT_REALTIME_OPTIONS = {};
|
|
|
|
// TypeScript `__awaiter` helper (emitted by tsc): drives a generator function
// as if it were an async function, adopting each yielded value into a promise
// of `P` before resuming. Duplicated per bundled sub-package.
var __awaiter$2 = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-promise yields so every value can be awaited uniformly.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator until it is done, then resolve with its return value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
// Pick the fetch implementation to use: the caller-supplied one when given,
// otherwise the global `fetch`, falling back to the bundled node-fetch
// polyfill on runtimes without a native fetch. Returns a forwarding wrapper
// so the chosen implementation is invoked without a bound `this`.
const resolveFetch$1 = (customFetch) => {
    const _fetch = customFetch
        ? customFetch
        : typeof fetch === 'undefined'
            ? nodeFetch
            : fetch;
    return (...args) => _fetch(...args);
};
|
|
// Use the platform-native Headers constructor when present; otherwise fall
// back to the bundled polyfill implementation.
const resolveHeadersConstructor = () => {
    return typeof Headers === 'undefined' ? Headers$1 : Headers;
};
|
|
// Wraps a fetch implementation so every request carries the `apikey` header
// and an Authorization bearer token: the value from getAccessToken() when it
// resolves to something non-nullish, otherwise the supabase key itself.
// Headers the caller already set are never overwritten.
const fetchWithAuth = (supabaseKey, getAccessToken, customFetch) => {
    const fetch = resolveFetch$1(customFetch);
    const HeadersConstructor = resolveHeadersConstructor();
    return (input, init) => __awaiter$2(void 0, void 0, void 0, function* () {
        var _a;
        // Fall back to the project API key when no session token is available.
        const accessToken = (_a = (yield getAccessToken())) !== null && _a !== void 0 ? _a : supabaseKey;
        let headers = new HeadersConstructor(init === null || init === void 0 ? void 0 : init.headers);
        if (!headers.has('apikey')) {
            headers.set('apikey', supabaseKey);
        }
        if (!headers.has('Authorization')) {
            headers.set('Authorization', `Bearer ${accessToken}`);
        }
        return fetch(input, Object.assign(Object.assign({}, init), { headers }));
    });
};
|
|
|
|
// TypeScript `__awaiter` helper (emitted by tsc): drives a generator function
// as if it were an async function, adopting each yielded value into a promise
// of `P` before resuming. Duplicated per bundled sub-package.
var __awaiter$1 = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-promise yields so every value can be awaited uniformly.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator until it is done, then resolve with its return value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
// Append a trailing "/" only when missing, so the result always ends with one
// and repeated calls are idempotent.
function ensureTrailingSlash(url) {
    if (url.endsWith('/')) {
        return url;
    }
    return url + '/';
}
|
|
// Merge user-provided client options over the library defaults, section by
// section (db, auth, realtime, global). Caller values win over defaults;
// headers are merged one level deep so default headers are kept unless
// explicitly overridden. `accessToken` is only present on the result when
// the caller supplied one.
function applySettingDefaults(options, defaults) {
    const { db: dbOpts, auth: authOpts, realtime: realtimeOpts, global: globalOpts } = options;
    const { db: dbDefaults, auth: authDefaults, realtime: realtimeDefaults, global: globalDefaults } = defaults;
    const result = {
        db: { ...dbDefaults, ...dbOpts },
        auth: { ...authDefaults, ...authOpts },
        realtime: { ...realtimeDefaults, ...realtimeOpts },
        storage: {},
        global: {
            ...globalDefaults,
            ...globalOpts,
            // Shallow-merge the two header maps; caller headers take precedence.
            headers: { ...(globalDefaults?.headers ?? {}), ...(globalOpts?.headers ?? {}) },
        },
        accessToken: async () => '',
    };
    if (options.accessToken) {
        result.accessToken = options.accessToken;
    }
    else {
        // hack around Required<>
        delete result.accessToken;
    }
    return result;
}
|
|
|
|
// Library version reported via the X-Client-Info header below.
const version = '2.71.1';

/** Current session will be checked for refresh at this interval. */
const AUTO_REFRESH_TICK_DURATION_MS = 30 * 1000;
/**
 * A token refresh will be attempted this many ticks before the current session expires. */
const AUTO_REFRESH_TICK_THRESHOLD = 3;
/*
 * Earliest time before an access token expires that the session should be refreshed.
 */
const EXPIRY_MARGIN_MS = AUTO_REFRESH_TICK_THRESHOLD * AUTO_REFRESH_TICK_DURATION_MS;
// Default GoTrue endpoint used when no URL is configured (local development).
const GOTRUE_URL = 'http://localhost:9999';
// Default storage key under which the auth session is persisted.
const STORAGE_KEY = 'supabase.auth.token';
// Headers attached to every GoTrue request.
const DEFAULT_HEADERS = { 'X-Client-Info': `gotrue-js/${version}` };
// Request/response header carrying the dated API version.
const API_VERSION_HEADER_NAME = 'X-Supabase-Api-Version';
// Known API versions, each with the instant it became effective.
const API_VERSIONS = {
    '2024-01-01': {
        timestamp: Date.parse('2024-01-01T00:00:00.0Z'),
        name: '2024-01-01',
    },
};
// Matches a base64url string (case-insensitive): groups of 4 chars with an
// optional final group of 2 or 3 (no '=' padding).
const BASE64URL_REGEX = /^([a-z0-9_-]{4})*($|[a-z0-9_-]{3}$|[a-z0-9_-]{2}$)$/i;
// How long fetched JWKS keys are cached before re-fetching.
const JWKS_TTL = 10 * 60 * 1000; // 10 minutes
|
|
|
|
/**
 * Base class for all auth errors. The `__isAuthError` marker lets
 * `isAuthError` recognize instances even across bundles/realms where
 * `instanceof` would fail.
 */
class AuthError extends Error {
    constructor(message, status, code) {
        super(message);
        this.__isAuthError = true;
        this.name = 'AuthError';
        Object.assign(this, { status, code });
    }
}
|
|
/** Duck-typed check for any AuthError (works across bundle copies). */
function isAuthError(error) {
    if (error === null || typeof error !== 'object') {
        return false;
    }
    return '__isAuthError' in error;
}
|
|
/**
 * Error produced from a non-2xx GoTrue API response; `status` is the HTTP
 * status and `code` the machine-readable API error code when available.
 */
class AuthApiError extends AuthError {
    constructor(message, status, code) {
        super(message, status, code);
        this.name = 'AuthApiError';
        Object.assign(this, { status, code });
    }
}
|
|
/** True when `error` is specifically an AuthApiError. */
function isAuthApiError(error) {
    if (!isAuthError(error)) {
        return false;
    }
    return error.name === 'AuthApiError';
}
|
|
/**
 * Wraps an unexpected failure (e.g. an unparsable response body); the
 * triggering value is preserved on `originalError`.
 */
class AuthUnknownError extends AuthError {
    constructor(message, originalError) {
        super(message);
        this.name = 'AuthUnknownError';
        Object.assign(this, { originalError });
    }
}
|
|
/**
 * Base for named auth errors: the subclass passes its own `name` so callers
 * can discriminate via `error.name` instead of `instanceof`.
 */
class CustomAuthError extends AuthError {
    constructor(message, name, status, code) {
        super(message, status, code);
        Object.assign(this, { name, status });
    }
}
|
|
// Thrown when an operation requires a session but none is stored / the
// server reports the session no longer exists.
class AuthSessionMissingError extends CustomAuthError {
    constructor() {
        super('Auth session missing!', 'AuthSessionMissingError', 400, undefined);
    }
}
|
|
/** True when `error` is specifically an AuthSessionMissingError. */
function isAuthSessionMissingError(error) {
    if (!isAuthError(error)) {
        return false;
    }
    return error.name === 'AuthSessionMissingError';
}
|
|
// Thrown when a token endpoint responds without the expected session/user.
class AuthInvalidTokenResponseError extends CustomAuthError {
    constructor() {
        super('Auth session or user missing', 'AuthInvalidTokenResponseError', 500, undefined);
    }
}
|
|
// Thrown client-side when sign-in parameters are malformed/missing.
class AuthInvalidCredentialsError extends CustomAuthError {
    constructor(message) {
        super(message, 'AuthInvalidCredentialsError', 400, undefined);
    }
}
|
|
/**
 * Error raised while processing an implicit-grant redirect URL; `details`
 * (nullable) carries extra context and is included in `toJSON()`.
 */
class AuthImplicitGrantRedirectError extends CustomAuthError {
    constructor(message, details = null) {
        super(message, 'AuthImplicitGrantRedirectError', 500, undefined);
        this.details = details;
    }
    toJSON() {
        const { name, message, status, details } = this;
        return { name, message, status, details };
    }
}
|
|
/** True when `error` is specifically an AuthImplicitGrantRedirectError. */
function isAuthImplicitGrantRedirectError(error) {
    if (!isAuthError(error)) {
        return false;
    }
    return error.name === 'AuthImplicitGrantRedirectError';
}
|
|
/**
 * Error raised while exchanging a PKCE authorization code; `details`
 * (nullable) carries extra context and is included in `toJSON()`.
 */
class AuthPKCEGrantCodeExchangeError extends CustomAuthError {
    constructor(message, details = null) {
        super(message, 'AuthPKCEGrantCodeExchangeError', 500, undefined);
        this.details = details;
    }
    toJSON() {
        const { name, message, status, details } = this;
        return { name, message, status, details };
    }
}
|
|
// Thrown for transient failures (network errors, 5xx gateway statuses);
// callers may safely retry the request.
class AuthRetryableFetchError extends CustomAuthError {
    constructor(message, status) {
        super(message, 'AuthRetryableFetchError', status, undefined);
    }
}
|
|
/** True when `error` is specifically an AuthRetryableFetchError. */
function isAuthRetryableFetchError(error) {
    if (!isAuthError(error)) {
        return false;
    }
    return error.name === 'AuthRetryableFetchError';
}
|
|
/**
 * This error is thrown on certain methods when the password used is deemed
 * weak. Inspect the reasons to identify what password strength rules are
 * inadequate.
 */
class AuthWeakPasswordError extends CustomAuthError {
    constructor(message, status, reasons) {
        super(message, 'AuthWeakPasswordError', status, 'weak_password');
        // machine-readable list of failed password-strength rules
        this.reasons = reasons;
    }
}
|
|
// Thrown when a JWT is structurally invalid (bad segment count/encoding).
class AuthInvalidJwtError extends CustomAuthError {
    constructor(message) {
        super(message, 'AuthInvalidJwtError', 400, 'invalid_jwt');
    }
}
|
|
|
|
/**
 * Avoid modifying this file. It's part of
 * https://github.com/supabase-community/base64url-js. Submit all fixes on
 * that repo!
 */
/**
 * Characters that encode 6 bits into a Base64-URL alphabet character.
 */
const TO_BASE64URL = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'.split('');
/**
 * Characters that can appear in a Base64-URL encoded string but should be
 * ignored (whitespace and '=' padding).
 */
const IGNORE_BASE64URL = ' \t\n\r='.split('');
/**
 * Maps an ASCII char code to its 6-bit value, -2 when the character should
 * be skipped, or -1 when it is invalid.
 */
const FROM_BASE64URL = (() => {
    const charMap = new Array(128).fill(-1);
    for (const ch of IGNORE_BASE64URL) {
        charMap[ch.charCodeAt(0)] = -2;
    }
    TO_BASE64URL.forEach((ch, index) => {
        charMap[ch.charCodeAt(0)] = index;
    });
    return charMap;
})();
|
|
/**
 * Converts a byte to a Base64-URL string.
 *
 * @param byte The byte to convert, or null to flush at the end of the byte sequence.
 * @param state The Base64 conversion state. Pass an initial value of `{ queue: 0, queuedBits: 0 }`.
 * @param emit A function called with the next Base64 character when ready.
 */
function byteToBase64URL(byte, state, emit) {
    // drain full 6-bit groups from the queue
    const drain = () => {
        while (state.queuedBits >= 6) {
            const pos = (state.queue >> (state.queuedBits - 6)) & 63;
            emit(TO_BASE64URL[pos]);
            state.queuedBits -= 6;
        }
    };
    if (byte !== null) {
        state.queue = (state.queue << 8) | byte;
        state.queuedBits += 8;
        drain();
    }
    else if (state.queuedBits > 0) {
        // flush: left-pad the remaining bits up to a full 6-bit group
        state.queue = state.queue << (6 - state.queuedBits);
        state.queuedBits = 6;
        drain();
    }
}
|
|
/**
 * Converts a String char code (extracted using `string.charCodeAt(position)`) to a sequence of Base64-URL characters.
 *
 * @param charCode The char code of the JavaScript string.
 * @param state The Base64 state. Pass an initial value of `{ queue: 0, queuedBits: 0 }`.
 * @param emit A function called with the next byte.
 */
function byteFromBase64URL(charCode, state, emit) {
    const bits = FROM_BASE64URL[charCode];
    if (bits === -2) {
        // ignore spaces, tabs, newlines, =
        return;
    }
    if (!(bits > -1)) {
        // -1 or out-of-range (undefined): not a Base64-URL character
        throw new Error(`Invalid Base64-URL character "${String.fromCharCode(charCode)}"`);
    }
    state.queue = (state.queue << 6) | bits;
    state.queuedBits += 6;
    while (state.queuedBits >= 8) {
        emit((state.queue >> (state.queuedBits - 8)) & 0xff);
        state.queuedBits -= 8;
    }
}
|
|
/**
 * Converts a Base64-URL encoded string into a JavaScript string. It is assumed
 * that the underlying string has been encoded as UTF-8.
 *
 * @param str The Base64-URL encoded string.
 */
function stringFromBase64URL(str) {
    const pieces = [];
    const utf8State = { utf8seq: 0, codepoint: 0 };
    const b64State = { queue: 0, queuedBits: 0 };
    // each decoded byte is fed through the UTF-8 decoder, which emits codepoints
    const onByte = (byte) => {
        stringFromUTF8(byte, utf8State, (codepoint) => {
            pieces.push(String.fromCodePoint(codepoint));
        });
    };
    for (let index = 0; index < str.length; index += 1) {
        byteFromBase64URL(str.charCodeAt(index), b64State, onByte);
    }
    return pieces.join('');
}
|
|
/**
 * Converts a Unicode codepoint to a multi-byte UTF-8 sequence.
 *
 * @param codepoint The Unicode codepoint.
 * @param emit Function which will be called for each UTF-8 byte that represents the codepoint.
 */
function codepointToUTF8(codepoint, emit) {
    if (codepoint <= 0x7f) {
        // 1-byte (ASCII)
        emit(codepoint);
    }
    else if (codepoint <= 0x7ff) {
        // 2-byte sequence
        emit(0xc0 | (codepoint >> 6));
        emit(0x80 | (codepoint & 0x3f));
    }
    else if (codepoint <= 0xffff) {
        // 3-byte sequence
        emit(0xe0 | (codepoint >> 12));
        emit(0x80 | ((codepoint >> 6) & 0x3f));
        emit(0x80 | (codepoint & 0x3f));
    }
    else if (codepoint <= 0x10ffff) {
        // 4-byte sequence
        emit(0xf0 | (codepoint >> 18));
        emit(0x80 | ((codepoint >> 12) & 0x3f));
        emit(0x80 | ((codepoint >> 6) & 0x3f));
        emit(0x80 | (codepoint & 0x3f));
    }
    else {
        throw new Error(`Unrecognized Unicode codepoint: ${codepoint.toString(16)}`);
    }
}
|
|
/**
 * Converts a JavaScript string to a sequence of UTF-8 bytes.
 *
 * @param str The string to convert to UTF-8.
 * @param emit Function which will be called for each UTF-8 byte of the string.
 */
function stringToUTF8(str, emit) {
    for (let i = 0; i < str.length; i += 1) {
        let codepoint = str.charCodeAt(i);
        const isHighSurrogate = codepoint > 0xd7ff && codepoint <= 0xdbff;
        if (isHighSurrogate) {
            // combine the UTF-16 surrogate pair into one Unicode codepoint
            const high = ((codepoint - 0xd800) * 0x400) & 0xffff;
            const low = (str.charCodeAt(i + 1) - 0xdc00) & 0xffff;
            codepoint = (low | high) + 0x10000;
            i += 1; // consumed the low surrogate too
        }
        codepointToUTF8(codepoint, emit);
    }
}
|
|
/**
 * Converts a UTF-8 byte to a Unicode codepoint.
 *
 * @param byte The UTF-8 byte next in the sequence.
 * @param state The shared state between consecutive UTF-8 bytes in the
 *              sequence, an object with the shape `{ utf8seq: 0, codepoint: 0 }`.
 * @param emit Function which will be called for each codepoint.
 */
function stringFromUTF8(byte, state, emit) {
    if (state.utf8seq === 0) {
        if (byte <= 0x7f) {
            // single-byte (ASCII) codepoint
            emit(byte);
            return;
        }
        // count the number of 1 leading bits until you reach 0
        for (let leadingBit = 1; leadingBit < 6; leadingBit += 1) {
            if (((byte >> (7 - leadingBit)) & 1) === 0) {
                state.utf8seq = leadingBit;
                break;
            }
        }
        // keep the payload bits of the lead byte
        switch (state.utf8seq) {
            case 2:
                state.codepoint = byte & 31;
                break;
            case 3:
                state.codepoint = byte & 15;
                break;
            case 4:
                state.codepoint = byte & 7;
                break;
            default:
                throw new Error('Invalid UTF-8 sequence');
        }
        state.utf8seq -= 1;
    }
    else if (state.utf8seq > 0) {
        if (byte <= 0x7f) {
            // continuation bytes must have the high bit set
            throw new Error('Invalid UTF-8 sequence');
        }
        state.codepoint = (state.codepoint << 6) | (byte & 63);
        state.utf8seq -= 1;
        if (state.utf8seq === 0) {
            emit(state.codepoint);
        }
    }
}
|
|
/**
 * Helper functions to convert different types of strings to Uint8Array
 */
/** Decodes a Base64-URL string into raw bytes. */
function base64UrlToUint8Array(str) {
    const bytes = [];
    const state = { queue: 0, queuedBits: 0 };
    for (let i = 0; i < str.length; i += 1) {
        byteFromBase64URL(str.charCodeAt(i), state, (byte) => bytes.push(byte));
    }
    return new Uint8Array(bytes);
}
|
|
/** Encodes a JavaScript string as UTF-8 bytes. */
function stringToUint8Array(str) {
    const bytes = [];
    stringToUTF8(str, (byte) => {
        bytes.push(byte);
    });
    return new Uint8Array(bytes);
}
|
|
/** Encodes raw bytes as a Base64-URL string (no padding). */
function bytesToBase64URL(bytes) {
    const chars = [];
    const state = { queue: 0, queuedBits: 0 };
    const collect = (char) => {
        chars.push(char);
    };
    bytes.forEach((byte) => byteToBase64URL(byte, state, collect));
    // always call with `null` after processing all bytes to flush the queue
    byteToBase64URL(null, state, collect);
    return chars.join('');
}
|
|
|
|
/** Returns the UNIX timestamp (seconds) `expiresIn` seconds from now. */
function expiresAt(expiresIn) {
    const nowInSeconds = Math.round(Date.now() / 1000);
    return nowInSeconds + expiresIn;
}
|
|
/**
 * Generates a v4-format UUID using Math.random (NOT cryptographically
 * secure; used only as a request/correlation identifier).
 */
function uuid() {
    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => {
        const r = (Math.random() * 16) | 0;
        const v = c === 'x' ? r : (r & 0x3) | 0x8;
        return v.toString(16);
    });
}
|
|
/** True when running in a browser-like environment (`window` and `document` exist). */
const isBrowser = () => {
    return typeof window !== 'undefined' && typeof document !== 'undefined';
};
|
|
// Memoized result of the localStorage write probe performed by
// `supportsLocalStorage` (the probe runs at most once).
const localStorageWriteTests = {
    tested: false,
    writable: false,
};
|
|
/**
 * Checks whether localStorage is supported on this browser.
 * The write probe runs once and its result is memoized in
 * `localStorageWriteTests`.
 */
const supportsLocalStorage = () => {
    if (!isBrowser()) {
        return false;
    }
    try {
        if (typeof globalThis.localStorage !== 'object') {
            return false;
        }
    }
    catch (e) {
        // DOM exception when accessing `localStorage`
        return false;
    }
    if (localStorageWriteTests.tested) {
        return localStorageWriteTests.writable;
    }
    const probeKey = `lswt-${Math.random()}${Math.random()}`;
    localStorageWriteTests.tested = true;
    try {
        globalThis.localStorage.setItem(probeKey, probeKey);
        globalThis.localStorage.removeItem(probeKey);
        localStorageWriteTests.writable = true;
    }
    catch (e) {
        // localStorage can't be written to
        // https://www.chromium.org/for-testers/bug-reporting-guidelines/uncaught-securityerror-failed-to-read-the-localstorage-property-from-window-access-is-denied-for-this-document
        localStorageWriteTests.writable = false;
    }
    return localStorageWriteTests.writable;
};
|
|
/**
 * Extracts parameters encoded in the URL both in the query and fragment.
 * Search parameters take precedence over identically-named hash parameters.
 */
function parseParametersFromURL(href) {
    const result = {};
    const url = new URL(href);
    if (url.hash.startsWith('#')) {
        try {
            const hashParams = new URLSearchParams(url.hash.substring(1));
            for (const [key, value] of hashParams) {
                result[key] = value;
            }
        }
        catch (e) {
            // hash is not a query string
        }
    }
    // search parameters take precedence over hash parameters
    for (const [key, value] of url.searchParams) {
        result[key] = value;
    }
    return result;
}
|
|
// Picks the fetch implementation to use: the caller-supplied one, the global
// `fetch`, or (when no global fetch exists) a lazily loaded bundled polyfill.
// NOTE: the dynamic-import expression is Vite-generated glue — do not edit.
const resolveFetch = (customFetch) => {
    let _fetch;
    if (customFetch) {
        _fetch = customFetch;
    }
    else if (typeof fetch === 'undefined') {
        // no global fetch: load the bundled polyfill on first call
        _fetch = (...args) => __vitePreload(async () => { const {default: fetch} = await Promise.resolve().then(() => browser);return { default: fetch }},true?void 0:void 0).then(({ default: fetch }) => fetch(...args));
    }
    else {
        _fetch = fetch;
    }
    // wrap so the chosen implementation is invoked per call
    return (...args) => _fetch(...args);
};
|
|
/** Duck-typed check for a fetch Response-like object. */
const looksLikeFetchResponse = (maybeResponse) => {
    if (typeof maybeResponse !== 'object' || maybeResponse === null) {
        return false;
    }
    return ('status' in maybeResponse &&
        'ok' in maybeResponse &&
        'json' in maybeResponse &&
        typeof maybeResponse.json === 'function');
};
|
|
// Storage helpers
/** JSON-serializes `data` and stores it under `key`. */
const setItemAsync = async (storage, key, data) => {
    const serialized = JSON.stringify(data);
    await storage.setItem(key, serialized);
};
|
|
/**
 * Reads `key` from storage. Returns the JSON-parsed value, the raw string
 * when it is not valid JSON, or null when absent/empty.
 */
const getItemAsync = async (storage, key) => {
    const raw = await storage.getItem(key);
    if (!raw) {
        return null;
    }
    try {
        return JSON.parse(raw);
    }
    catch (_err) {
        // not JSON — hand back the raw string
        return raw;
    }
};
|
|
/** Deletes `key` from storage. */
const removeItemAsync = async (storage, key) => {
    return storage.removeItem(key);
};
|
|
/**
 * A deferred represents some asynchronous work that is not yet finished, which
 * may or may not culminate in a value.
 * Taken from: https://github.com/mike-north/types/blob/master/src/async.ts
 */
class Deferred {
    constructor() {
        // expose the promise's resolve/reject so outside code can settle it
        this.promise = new Deferred.promiseConstructor((resolve, reject) => {
            this.resolve = resolve;
            this.reject = reject;
        });
    }
}
Deferred.promiseConstructor = Promise;
|
|
/**
 * Splits and decodes a JWT without verifying it. Throws AuthInvalidJwtError
 * when the token does not have three base64url segments.
 */
function decodeJWT(token) {
    const parts = token.split('.');
    if (parts.length !== 3) {
        throw new AuthInvalidJwtError('Invalid JWT structure');
    }
    // every segment must match the base64url format
    for (const part of parts) {
        if (!BASE64URL_REGEX.test(part)) {
            throw new AuthInvalidJwtError('JWT not in base64url format');
        }
    }
    const [rawHeader, rawPayload, rawSignature] = parts;
    return {
        // using base64url lib
        header: JSON.parse(stringFromBase64URL(rawHeader)),
        payload: JSON.parse(stringFromBase64URL(rawPayload)),
        signature: base64UrlToUint8Array(rawSignature),
        raw: {
            header: rawHeader,
            payload: rawPayload,
        },
    };
}
|
|
/**
 * Creates a promise that resolves to null after some time.
 */
async function sleep(time) {
    return new Promise((resolve) => {
        setTimeout(() => resolve(null), time);
    });
}
|
|
/**
 * Converts the provided async function into a retryable function. Each result
 * or thrown error is sent to the isRetryable function which should return true
 * if the function should run again.
 *
 * On success `isRetryable(attempt, null, result)` decides whether to retry;
 * on error `isRetryable(attempt, error)` decides whether to retry, otherwise
 * the error is rethrown. Returns a promise for the first accepted result.
 *
 * Fix: the original wrapped an async IIFE in `new Promise(...)` (the
 * explicit-Promise-construction anti-pattern); the async loop below returns
 * the same promise semantics directly.
 */
function retryable(fn, isRetryable) {
    return (async () => {
        for (let attempt = 0;; attempt += 1) {
            try {
                const result = await fn(attempt);
                if (!isRetryable(attempt, null, result)) {
                    return result;
                }
            }
            catch (err) {
                if (!isRetryable(attempt, err)) {
                    throw err;
                }
            }
        }
    })();
}
|
|
/**
 * Formats a number as its last two lowercase hex digits (zero-padded).
 * Fix: replaces the deprecated Annex-B `String.prototype.substr(-2)` with the
 * standard `slice(-2)` — identical result for all inputs.
 */
function dec2hex(dec) {
    return ('0' + dec.toString(16)).slice(-2);
}
|
|
// Functions below taken from: https://stackoverflow.com/questions/63309409/creating-a-code-verifier-and-challenge-for-pkce-auth-on-spotify-api-in-reactjs
/**
 * Generates a random PKCE code verifier. Uses WebCrypto randomness when
 * available; otherwise falls back to Math.random (non-cryptographic).
 */
function generatePKCEVerifier() {
    const verifierLength = 56;
    if (typeof crypto === 'undefined') {
        // insecure fallback for environments without WebCrypto
        const charSet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~';
        let verifier = '';
        for (let i = 0; i < verifierLength; i += 1) {
            verifier += charSet.charAt(Math.floor(Math.random() * charSet.length));
        }
        return verifier;
    }
    const randomWords = new Uint32Array(verifierLength);
    crypto.getRandomValues(randomWords);
    return Array.from(randomWords, dec2hex).join('');
}
|
|
async function sha256(randomString) {
|
|
const encoder = new TextEncoder();
|
|
const encodedData = encoder.encode(randomString);
|
|
const hash = await crypto.subtle.digest('SHA-256', encodedData);
|
|
const bytes = new Uint8Array(hash);
|
|
return Array.from(bytes)
|
|
.map((c) => String.fromCharCode(c))
|
|
.join('');
|
|
}
|
|
/**
 * Derives the PKCE code challenge from a verifier: base64url(SHA-256) when
 * WebCrypto is available, otherwise the plain verifier.
 */
async function generatePKCEChallenge(verifier) {
    const canHash = typeof crypto !== 'undefined' &&
        typeof crypto.subtle !== 'undefined' &&
        typeof TextEncoder !== 'undefined';
    if (!canHash) {
        console.warn('WebCrypto API is not supported. Code challenge method will default to use plain instead of sha256.');
        return verifier;
    }
    const digest = await sha256(verifier);
    // base64url-encode the binary digest string
    return btoa(digest).replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
}
|
|
/**
 * Creates a PKCE verifier, persists it (with a '/PASSWORD_RECOVERY' suffix
 * during password recovery) and returns [codeChallenge, codeChallengeMethod].
 */
async function getCodeChallengeAndMethod(storage, storageKey, isPasswordRecovery = false) {
    const codeVerifier = generatePKCEVerifier();
    const storedValue = isPasswordRecovery
        ? `${codeVerifier}/PASSWORD_RECOVERY`
        : codeVerifier;
    await setItemAsync(storage, `${storageKey}-code-verifier`, storedValue);
    const codeChallenge = await generatePKCEChallenge(codeVerifier);
    const codeChallengeMethod = codeChallenge === codeVerifier ? 'plain' : 's256';
    return [codeChallenge, codeChallengeMethod];
}
|
|
/** Parses the API version which is 2YYY-MM-DD. */
const API_VERSION_REGEX = /^2[0-9]{3}-(0[1-9]|1[0-2])-(0[1-9]|1[0-9]|2[0-9]|3[0-1])$/i;
/**
 * Reads the API version date from the response header; returns null when the
 * header is absent or not a valid 2YYY-MM-DD value.
 */
function parseResponseAPIVersion(response) {
    const apiVersion = response.headers.get(API_VERSION_HEADER_NAME);
    if (!apiVersion) {
        return null;
    }
    if (!apiVersion.match(API_VERSION_REGEX)) {
        return null;
    }
    try {
        return new Date(`${apiVersion}T00:00:00.0Z`);
    }
    catch (e) {
        return null;
    }
}
|
|
/**
 * Validates a JWT `exp` claim: throws when missing/zero or already in the
 * past (compared against the current UNIX time in seconds).
 */
function validateExp(exp) {
    if (!exp) {
        throw new Error('Missing exp claim');
    }
    const nowInSeconds = Math.floor(Date.now() / 1000);
    if (exp <= nowInSeconds) {
        throw new Error('JWT has expired');
    }
}
|
|
/**
 * Maps a JWT `alg` claim to its WebCrypto verify parameters; throws for any
 * unsupported algorithm.
 */
function getAlgorithm(alg) {
    const algorithms = {
        RS256: {
            name: 'RSASSA-PKCS1-v1_5',
            hash: { name: 'SHA-256' },
        },
        ES256: {
            name: 'ECDSA',
            namedCurve: 'P-256',
            hash: { name: 'SHA-256' },
        },
    };
    // own-key check so inherited names like 'constructor' are rejected
    if (!Object.hasOwn(algorithms, alg)) {
        throw new Error('Invalid alg claim');
    }
    return algorithms[alg];
}
|
|
// Lowercase hex UUID shape: 8-4-4-4-12.
const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/;
/** Throws when `str` is not a lowercase hex UUID. */
function validateUUID(str) {
    if (UUID_REGEX.test(str)) {
        return;
    }
    throw new Error('@supabase/auth-js: Expected parameter to be UUID but is not');
}
|
|
/**
 * Builds a Proxy standing in for a user object that was not persisted in the
 * configured userStorage. Any property access/assignment/deletion throws a
 * descriptive error directing callers to getUser(); the marker property
 * `__isUserNotAvailableProxy` and common inspection symbols are exempt.
 */
function userNotAvailableProxy() {
    return new Proxy({}, {
        get(_target, prop) {
            if (prop === '__isUserNotAvailableProxy') {
                return true;
            }
            // Preventative check for common problematic symbols during cloning/inspection
            // These symbols might be accessed by structuredClone or other internal mechanisms.
            if (typeof prop === 'symbol') {
                const sProp = prop.toString();
                if (sProp === 'Symbol(Symbol.toPrimitive)' ||
                    sProp === 'Symbol(Symbol.toStringTag)' ||
                    sProp === 'Symbol(util.inspect.custom)') {
                    // Node.js util.inspect
                    return undefined;
                }
            }
            throw new Error(`@supabase/auth-js: client was created with userStorage option and there was no user stored in the user storage. Accessing the "${prop}" property of the session object is not supported. Please use getUser() instead.`);
        },
        set(_target, prop) {
            throw new Error(`@supabase/auth-js: client was created with userStorage option and there was no user stored in the user storage. Setting the "${prop}" property of the session object is not supported. Please use getUser() to fetch a user object you can manipulate.`);
        },
        deleteProperty(_target, prop) {
            throw new Error(`@supabase/auth-js: client was created with userStorage option and there was no user stored in the user storage. Deleting the "${prop}" property of the session object is not supported. Please use getUser() to fetch a user object you can manipulate.`);
        },
    });
}
|
|
/**
 * Deep clones a JSON-serializable object using JSON.parse(JSON.stringify(obj)).
 * Note: Only works for JSON-safe data (drops undefined/functions, mangles
 * Dates) — kept deliberately for parity with the stored-session format.
 */
function deepClone(obj) {
    const serialized = JSON.stringify(obj);
    return JSON.parse(serialized);
}
|
|
|
|
// TypeScript-emitted `__rest` helper: copies the own enumerable properties of
// `s` (string keys and symbols) into a new object, excluding the keys listed
// in `e`. Used to implement object rest destructuring in downleveled output.
var __rest$1 = (undefined && undefined.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
|
|
// Extracts a human-readable message from an API error payload, trying the
// common GoTrue fields in order and falling back to the serialized payload.
const _getErrorMessage = (err) => err.msg || err.message || err.error_description || err.error || JSON.stringify(err);
// Gateway statuses treated as transient network failures (retryable).
const NETWORK_ERROR_CODES = [502, 503, 504];
|
|
/**
 * Converts a failed fetch Response (or any thrown value) into a typed auth
 * error and throws it — this function never returns normally.
 * Precedence: non-Response values and 502/503/504 become retryable errors;
 * then the error code is read (`code` for API versions >= 2024-01-01,
 * `error_code` for older responses) and mapped to a specific error class;
 * everything else becomes AuthApiError.
 */
async function handleError(error) {
    var _a;
    if (!looksLikeFetchResponse(error)) {
        // not a Response at all (e.g. thrown TypeError) — treat as retryable
        throw new AuthRetryableFetchError(_getErrorMessage(error), 0);
    }
    if (NETWORK_ERROR_CODES.includes(error.status)) {
        // status in 500...599 range - server had an error, request might be retryed.
        throw new AuthRetryableFetchError(_getErrorMessage(error), error.status);
    }
    let data;
    try {
        data = await error.json();
    }
    catch (e) {
        // body was not JSON — nothing more specific can be derived
        throw new AuthUnknownError(_getErrorMessage(e), e);
    }
    let errorCode = undefined;
    const responseAPIVersion = parseResponseAPIVersion(error);
    // API versions >= 2024-01-01 report the code in `data.code`; older
    // versions use `data.error_code`.
    if (responseAPIVersion &&
        responseAPIVersion.getTime() >= API_VERSIONS['2024-01-01'].timestamp &&
        typeof data === 'object' &&
        data &&
        typeof data.code === 'string') {
        errorCode = data.code;
    }
    else if (typeof data === 'object' && data && typeof data.error_code === 'string') {
        errorCode = data.error_code;
    }
    if (!errorCode) {
        // Legacy support for weak password errors, when there were no error codes
        if (typeof data === 'object' &&
            data &&
            typeof data.weak_password === 'object' &&
            data.weak_password &&
            Array.isArray(data.weak_password.reasons) &&
            data.weak_password.reasons.length &&
            data.weak_password.reasons.reduce((a, i) => a && typeof i === 'string', true)) {
            throw new AuthWeakPasswordError(_getErrorMessage(data), error.status, data.weak_password.reasons);
        }
    }
    else if (errorCode === 'weak_password') {
        throw new AuthWeakPasswordError(_getErrorMessage(data), error.status, ((_a = data.weak_password) === null || _a === void 0 ? void 0 : _a.reasons) || []);
    }
    else if (errorCode === 'session_not_found') {
        // The `session_id` inside the JWT does not correspond to a row in the
        // `sessions` table. This usually means the user has signed out, has been
        // deleted, or their session has somehow been terminated.
        throw new AuthSessionMissingError();
    }
    // fall-through: generic API error carrying the status and code (if any)
    throw new AuthApiError(_getErrorMessage(data), error.status || 500, errorCode);
}
|
|
/**
 * Builds the fetch init object: GET requests carry only headers; other
 * methods get a JSON Content-Type, a serialized body, and any extra
 * fetch `parameters` merged in.
 */
const _getRequestParams = (method, options, parameters, body) => {
    const params = { method, headers: options?.headers || {} };
    if (method === 'GET') {
        return params;
    }
    params.headers = { 'Content-Type': 'application/json;charset=UTF-8', ...options?.headers };
    params.body = JSON.stringify(body);
    return { ...params, ...parameters };
};
|
|
/**
 * Core GoTrue request helper: applies the API version header, optional JWT
 * Authorization, query-string parameters and redirect_to, then delegates to
 * `_handleRequest`. The result is shaped by `options.xform` when given,
 * otherwise wrapped as `{ data, error: null }`.
 */
async function _request(fetcher, method, url, options) {
    const headers = { ...options?.headers };
    if (!headers[API_VERSION_HEADER_NAME]) {
        headers[API_VERSION_HEADER_NAME] = API_VERSIONS['2024-01-01'].name;
    }
    if (options?.jwt) {
        headers['Authorization'] = `Bearer ${options.jwt}`;
    }
    const qs = options?.query ?? {};
    if (options?.redirectTo) {
        qs['redirect_to'] = options.redirectTo;
    }
    const queryString = Object.keys(qs).length ? '?' + new URLSearchParams(qs).toString() : '';
    const data = await _handleRequest(fetcher, method, url + queryString, {
        headers,
        noResolveJson: options?.noResolveJson,
    }, {}, options?.body);
    if (options?.xform) {
        return options.xform(data);
    }
    return { data: { ...data }, error: null };
}
|
|
/**
 * Executes the fetch and normalizes failures: network errors become
 * AuthRetryableFetchError, non-ok responses are dispatched to `handleError`
 * (which always throws), and the body is JSON-parsed unless
 * `options.noResolveJson` is set.
 */
async function _handleRequest(fetcher, method, url, options, parameters, body) {
    const requestParams = _getRequestParams(method, options, parameters, body);
    let result;
    try {
        result = await fetcher(url, { ...requestParams });
    }
    catch (e) {
        console.error(e);
        // fetch failed, likely due to a network or CORS error
        throw new AuthRetryableFetchError(_getErrorMessage(e), 0);
    }
    if (!result.ok) {
        await handleError(result);
    }
    if (options?.noResolveJson) {
        return result;
    }
    try {
        return await result.json();
    }
    catch (e) {
        // body was not valid JSON; handleError throws the appropriate error
        await handleError(e);
    }
}
|
|
/**
 * Shapes a token-endpoint payload into `{ data: { session, user }, error }`,
 * deriving `expires_at` from `expires_in` when the server omitted it.
 */
function _sessionResponse(data) {
    let session = null;
    if (hasSession(data)) {
        session = { ...data };
        if (!data.expires_at) {
            session.expires_at = expiresAt(data.expires_in);
        }
    }
    const user = data.user ?? data;
    return { data: { session, user }, error: null };
}
|
|
/**
 * Like `_sessionResponse` but also forwards a well-formed `weak_password`
 * advisory (non-empty string reasons plus a message) onto the response data.
 */
function _sessionResponsePassword(data) {
    const response = _sessionResponse(data);
    const weak = data.weak_password;
    const hasValidWeakPassword = !response.error &&
        weak &&
        typeof weak === 'object' &&
        Array.isArray(weak.reasons) &&
        weak.reasons.length > 0 &&
        typeof weak.message === 'string' &&
        weak.message &&
        weak.reasons.every((reason) => typeof reason === 'string');
    if (hasValidWeakPassword) {
        response.data.weak_password = data.weak_password;
    }
    return response;
}
|
|
/** Shapes a payload into `{ data: { user }, error: null }` (payload itself is the user when no `user` field exists). */
function _userResponse(data) {
    const user = data.user ?? data;
    return { data: { user }, error: null };
}
|
|
// Wraps an SSO payload in the standard { data, error } envelope.
function _ssoResponse(data) {
    return { data, error: null };
}
|
|
/**
 * Splits an admin generate-link payload into the link `properties` and the
 * remaining fields (the user object).
 */
function _generateLinkResponse(data) {
    const { action_link, email_otp, hashed_token, redirect_to, verification_type, ...rest } = data;
    const properties = {
        action_link,
        email_otp,
        hashed_token,
        redirect_to,
        verification_type,
    };
    return {
        data: {
            properties,
            user: { ...rest },
        },
        error: null,
    };
}
|
|
// Pass-through xform used when the caller wants the raw (unparsed) response.
function _noResolveJsonResponse(data) {
    return data;
}
|
|
/**
 * hasSession checks if the response object contains a valid session
 * @param data A response object
 * @returns truthy if a session is in the response (note: returns the last
 *          operand of the && chain, not a strict boolean — callers only use
 *          its truthiness)
 */
function hasSession(data) {
    const { access_token, refresh_token, expires_in } = data;
    return access_token && refresh_token && expires_in;
}
|
|
|
|
// Valid values for the `scope` parameter of sign-out requests.
const SIGN_OUT_SCOPES = ['global', 'local', 'others'];
|
|
|
|
// TypeScript-emitted `__rest` helper (second copy, different module scope):
// copies own enumerable properties of `s` — string keys and symbols — into a
// new object, excluding the keys listed in `e`.
var __rest = (undefined && undefined.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
|
|
class GoTrueAdminApi {
    /**
     * Creates an admin API client bound to a GoTrue server.
     *
     * @param url Base URL of the GoTrue server (no trailing slash).
     * @param headers Default headers sent with every request.
     * @param fetch Optional fetch implementation; resolveFetch falls back to a default otherwise.
     */
    constructor({ url = '', headers = {}, fetch, }) {
        this.url = url;
        this.headers = headers;
        this.fetch = resolveFetch(fetch);
        // MFA admin helpers, bound so they can be destructured without losing `this`.
        this.mfa = {
            listFactors: this._listFactors.bind(this),
            deleteFactor: this._deleteFactor.bind(this),
        };
    }
    /**
     * Removes a logged-in session.
     * @param jwt A valid, logged-in JWT.
     * @param scope The logout scope. Must be one of SIGN_OUT_SCOPES; defaults to 'global'.
     * @throws Error if `scope` is not a recognized value (thrown synchronously, not returned).
     */
    async signOut(jwt, scope = SIGN_OUT_SCOPES[0]) {
        if (SIGN_OUT_SCOPES.indexOf(scope) < 0) {
            throw new Error(`@supabase/auth-js: Parameter scope must be one of ${SIGN_OUT_SCOPES.join(', ')}`);
        }
        try {
            // noResolveJson: logout returns no meaningful body.
            await _request(this.fetch, 'POST', `${this.url}/logout?scope=${scope}`, {
                headers: this.headers,
                jwt,
                noResolveJson: true,
            });
            return { data: null, error: null };
        }
        catch (error) {
            // Auth errors are surfaced via the result object; anything else is rethrown.
            if (isAuthError(error)) {
                return { data: null, error };
            }
            throw error;
        }
    }
    /**
     * Sends an invite link to an email address.
     * @param email The email address of the user.
     * @param options Additional options to be included when inviting.
     */
    async inviteUserByEmail(email, options = {}) {
        try {
            return await _request(this.fetch, 'POST', `${this.url}/invite`, {
                body: { email, data: options.data },
                headers: this.headers,
                redirectTo: options.redirectTo,
                xform: _userResponse,
            });
        }
        catch (error) {
            if (isAuthError(error)) {
                return { data: { user: null }, error };
            }
            throw error;
        }
    }
    /**
     * Generates email links and OTPs to be sent via a custom email provider.
     * @param email The user's email.
     * @param options.password User password. For signup only.
     * @param options.data Optional user metadata. For signup only.
     * @param options.redirectTo The redirect url which should be appended to the generated link
     */
    async generateLink(params) {
        try {
            // Flatten { options, ...rest } into a single request body.
            const { options } = params, rest = __rest(params, ["options"]);
            const body = Object.assign(Object.assign({}, rest), options);
            if ('newEmail' in rest) {
                // replace newEmail with new_email in request body
                body.new_email = rest === null || rest === void 0 ? void 0 : rest.newEmail;
                delete body['newEmail'];
            }
            return await _request(this.fetch, 'POST', `${this.url}/admin/generate_link`, {
                body: body,
                headers: this.headers,
                xform: _generateLinkResponse,
                redirectTo: options === null || options === void 0 ? void 0 : options.redirectTo,
            });
        }
        catch (error) {
            if (isAuthError(error)) {
                return {
                    data: {
                        properties: null,
                        user: null,
                    },
                    error,
                };
            }
            throw error;
        }
    }
    // User Admin API
    /**
     * Creates a new user.
     * This function should only be called on a server. Never expose your `service_role` key in the browser.
     */
    async createUser(attributes) {
        try {
            return await _request(this.fetch, 'POST', `${this.url}/admin/users`, {
                body: attributes,
                headers: this.headers,
                xform: _userResponse,
            });
        }
        catch (error) {
            if (isAuthError(error)) {
                return { data: { user: null }, error };
            }
            throw error;
        }
    }
    /**
     * Get a list of users.
     *
     * This function should only be called on a server. Never expose your `service_role` key in the browser.
     * @param params An object which supports `page` and `perPage` as numbers, to alter the paginated results.
     */
    async listUsers(params) {
        var _a, _b, _c, _d, _e, _f, _g;
        try {
            const pagination = { nextPage: null, lastPage: 0, total: 0 };
            // noResolveJson so the pagination headers (x-total-count, link) stay readable.
            const response = await _request(this.fetch, 'GET', `${this.url}/admin/users`, {
                headers: this.headers,
                noResolveJson: true,
                query: {
                    page: (_b = (_a = params === null || params === void 0 ? void 0 : params.page) === null || _a === void 0 ? void 0 : _a.toString()) !== null && _b !== void 0 ? _b : '',
                    per_page: (_d = (_c = params === null || params === void 0 ? void 0 : params.perPage) === null || _c === void 0 ? void 0 : _c.toString()) !== null && _d !== void 0 ? _d : '',
                },
                xform: _noResolveJsonResponse,
            });
            if (response.error)
                throw response.error;
            const users = await response.json();
            const total = (_e = response.headers.get('x-total-count')) !== null && _e !== void 0 ? _e : 0;
            const links = (_g = (_f = response.headers.get('link')) === null || _f === void 0 ? void 0 : _f.split(',')) !== null && _g !== void 0 ? _g : [];
            if (links.length > 0) {
                links.forEach((link) => {
                    // NOTE(review): substring(0, 1) keeps only the first digit of the page
                    // number, so page values >= 10 would parse incorrectly — this mirrors
                    // the bundled upstream code; confirm before relying on nextPage/lastPage.
                    const page = parseInt(link.split(';')[0].split('=')[1].substring(0, 1));
                    const rel = JSON.parse(link.split(';')[1].split('=')[1]);
                    pagination[`${rel}Page`] = page;
                });
                pagination.total = parseInt(total);
            }
            return { data: Object.assign(Object.assign({}, users), pagination), error: null };
        }
        catch (error) {
            if (isAuthError(error)) {
                return { data: { users: [] }, error };
            }
            throw error;
        }
    }
    /**
     * Get user by id.
     *
     * @param uid The user's unique identifier
     *
     * This function should only be called on a server. Never expose your `service_role` key in the browser.
     */
    async getUserById(uid) {
        validateUUID(uid);
        try {
            return await _request(this.fetch, 'GET', `${this.url}/admin/users/${uid}`, {
                headers: this.headers,
                xform: _userResponse,
            });
        }
        catch (error) {
            if (isAuthError(error)) {
                return { data: { user: null }, error };
            }
            throw error;
        }
    }
    /**
     * Updates the user data.
     *
     * @param attributes The data you want to update.
     *
     * This function should only be called on a server. Never expose your `service_role` key in the browser.
     */
    async updateUserById(uid, attributes) {
        validateUUID(uid);
        try {
            return await _request(this.fetch, 'PUT', `${this.url}/admin/users/${uid}`, {
                body: attributes,
                headers: this.headers,
                xform: _userResponse,
            });
        }
        catch (error) {
            if (isAuthError(error)) {
                return { data: { user: null }, error };
            }
            throw error;
        }
    }
    /**
     * Delete a user. Requires a `service_role` key.
     *
     * @param id The user id you want to remove.
     * @param shouldSoftDelete If true, then the user will be soft-deleted from the auth schema. Soft deletion allows user identification from the hashed user ID but is not reversible.
     * Defaults to false for backward compatibility.
     *
     * This function should only be called on a server. Never expose your `service_role` key in the browser.
     */
    async deleteUser(id, shouldSoftDelete = false) {
        validateUUID(id);
        try {
            return await _request(this.fetch, 'DELETE', `${this.url}/admin/users/${id}`, {
                headers: this.headers,
                body: {
                    should_soft_delete: shouldSoftDelete,
                },
                xform: _userResponse,
            });
        }
        catch (error) {
            if (isAuthError(error)) {
                return { data: { user: null }, error };
            }
            throw error;
        }
    }
    // Lists MFA factors for a user. Exposed publicly as `admin.mfa.listFactors`.
    async _listFactors(params) {
        validateUUID(params.userId);
        try {
            const { data, error } = await _request(this.fetch, 'GET', `${this.url}/admin/users/${params.userId}/factors`, {
                headers: this.headers,
                xform: (factors) => {
                    return { data: { factors }, error: null };
                },
            });
            return { data, error };
        }
        catch (error) {
            if (isAuthError(error)) {
                return { data: null, error };
            }
            throw error;
        }
    }
    // Deletes one MFA factor of a user. Exposed publicly as `admin.mfa.deleteFactor`.
    async _deleteFactor(params) {
        validateUUID(params.userId);
        validateUUID(params.id);
        try {
            const data = await _request(this.fetch, 'DELETE', `${this.url}/admin/users/${params.userId}/factors/${params.id}`, {
                headers: this.headers,
            });
            return { data, error: null };
        }
        catch (error) {
            if (isAuthError(error)) {
                return { data: null, error };
            }
            throw error;
        }
    }
}
|
|
|
|
/**
 * Builds a localStorage-compatible adapter (getItem/setItem/removeItem)
 * backed by a plain in-memory object instead of the browser's storage.
 *
 * @param store Optional backing object; shared with the caller by reference.
 */
function memoryLocalStorageAdapter(store = {}) {
    const adapter = {
        getItem(key) {
            // Falsy stored values (e.g. '') also map to null, matching the
            // original `|| null` behavior.
            return store[key] || null;
        },
        setItem(key, value) {
            store[key] = value;
        },
        removeItem(key) {
            delete store[key];
        },
    };
    return adapter;
}
|
|
|
|
/**
 * Installs a `globalThis` binding in environments that lack one.
 * https://mathiasbynens.be/notes/globalthis
 */
function polyfillGlobalThis() {
    // Nothing to do on modern runtimes.
    if (typeof globalThis === 'object')
        return;
    try {
        // Temporarily add a getter to Object.prototype; when read as the bare
        // identifier __magic__ (sloppy mode), the getter's `this` is the global
        // object, which lets us assign `globalThis` onto it.
        Object.defineProperty(Object.prototype, '__magic__', {
            get: function () {
                return this;
            },
            configurable: true,
        });
        // @ts-expect-error 'Allow access to magic'
        __magic__.globalThis = __magic__;
        // @ts-expect-error 'Allow access to magic'
        delete Object.prototype.__magic__;
    }
    catch (e) {
        // defineProperty on Object.prototype failed; fall back to `self`,
        // which exists in browser windows and workers.
        if (typeof self !== 'undefined') {
            // @ts-expect-error 'Allow access to globals'
            self.globalThis = self;
        }
    }
}
|
|
|
|
/**
 * Internal feature flags for the lock implementation.
 * @experimental
 */
const internals = {
    /**
     * Verbose lock logging, enabled by setting the
     * `supabase.gotrue-js.locks.debug` localStorage key to the string 'true'.
     * Evaluated once at module load.
     * @experimental
     */
    debug: !!(globalThis &&
        supportsLocalStorage() &&
        globalThis.localStorage &&
        globalThis.localStorage.getItem('supabase.gotrue-js.locks.debug') === 'true'),
};
|
|
/**
 * An error thrown when a lock cannot be acquired after some amount of time.
 *
 * Use the {@link #isAcquireTimeout} property instead of checking with `instanceof`.
 */
class LockAcquireTimeoutError extends Error {
    constructor(message) {
        super(message);
        // Marker flag so callers can detect timeouts without relying on
        // `instanceof`, which breaks across bundled copies of this module.
        this.isAcquireTimeout = true;
    }
}
|
|
// Timeout flavor thrown specifically by the Navigator LockManager-based lock.
class NavigatorLockAcquireTimeoutError extends LockAcquireTimeoutError {
}
|
|
/**
 * Implements a global exclusive lock using the Navigator LockManager API. It
 * is available on all browsers released after 2022-03-15 with Safari being the
 * last one to release support. If the API is not available, this function will
 * throw. Make sure you check availablility before configuring {@link
 * GoTrueClient}.
 *
 * You can turn on debugging by setting the `supabase.gotrue-js.locks.debug`
 * local storage item to `true`.
 *
 * Internals:
 *
 * Since the LockManager API does not preserve stack traces for the async
 * function passed in the `request` method, a trick is used where acquiring the
 * lock releases a previously started promise to run the operation in the `fn`
 * function. The lock waits for that promise to finish (with or without error),
 * while the function will finally wait for the result anyway.
 *
 * @param name Name of the lock to be acquired.
 * @param acquireTimeout If negative, no timeout. If 0 an error is thrown if
 *                       the lock can't be acquired without waiting. If positive, the lock acquire
 *                       will time out after so many milliseconds. An error is
 *                       a timeout if it has `isAcquireTimeout` set to true.
 * @param fn The operation to run once the lock is acquired.
 */
async function navigatorLock(name, acquireTimeout, fn) {
    if (internals.debug) {
        console.log('@supabase/gotrue-js: navigatorLock: acquire lock', name, acquireTimeout);
    }
    const abortController = new globalThis.AbortController();
    if (acquireTimeout > 0) {
        // Abort the pending lock request after the timeout. The timer is not
        // cleared on success; presumably aborting after the lock was granted has
        // no effect per the LockManager spec — TODO(review): confirm.
        setTimeout(() => {
            abortController.abort();
            if (internals.debug) {
                console.log('@supabase/gotrue-js: navigatorLock acquire timed out', name);
            }
        }, acquireTimeout);
    }
    // MDN article: https://developer.mozilla.org/en-US/docs/Web/API/LockManager/request
    // Wrapping navigator.locks.request() with a plain Promise is done as some
    // libraries like zone.js patch the Promise object to track the execution
    // context. However, it appears that most browsers use an internal promise
    // implementation when using the navigator.locks.request() API causing them
    // to lose context and emit confusing log messages or break certain features.
    // This wrapping is believed to help zone.js track the execution context
    // better.
    return await Promise.resolve().then(() => globalThis.navigator.locks.request(name, acquireTimeout === 0
        ? {
            // acquireTimeout === 0: fail immediately instead of waiting.
            mode: 'exclusive',
            ifAvailable: true,
        }
        : {
            mode: 'exclusive',
            signal: abortController.signal,
        }, async (lock) => {
        if (lock) {
            if (internals.debug) {
                console.log('@supabase/gotrue-js: navigatorLock: acquired', name, lock.name);
            }
            try {
                return await fn();
            }
            finally {
                if (internals.debug) {
                    console.log('@supabase/gotrue-js: navigatorLock: released', name, lock.name);
                }
            }
        }
        else {
            // `lock` is null: with ifAvailable=true this means the lock was busy.
            if (acquireTimeout === 0) {
                if (internals.debug) {
                    console.log('@supabase/gotrue-js: navigatorLock: not immediately available', name);
                }
                throw new NavigatorLockAcquireTimeoutError(`Acquiring an exclusive Navigator LockManager lock "${name}" immediately failed`);
            }
            else {
                if (internals.debug) {
                    try {
                        const result = await globalThis.navigator.locks.query();
                        console.log('@supabase/gotrue-js: Navigator LockManager state', JSON.stringify(result, null, '  '));
                    }
                    catch (e) {
                        console.warn('@supabase/gotrue-js: Error when querying Navigator LockManager state', e);
                    }
                }
                // Browser is not following the Navigator LockManager spec, it
                // returned a null lock when we didn't use ifAvailable. So we can
                // pretend the lock is acquired in the name of backward compatibility
                // and user experience and just run the function.
                console.warn('@supabase/gotrue-js: Navigator LockManager returned a null lock when using #request without ifAvailable set to true, it appears this browser is not following the LockManager spec https://developer.mozilla.org/en-US/docs/Web/API/LockManager/request');
                return await fn();
            }
        }
    }));
}
|
|
|
|
polyfillGlobalThis(); // Make "globalThis" available
// Defaults merged under caller-provided options in the GoTrueClient constructor.
const DEFAULT_OPTIONS = {
    url: GOTRUE_URL,
    storageKey: STORAGE_KEY,
    autoRefreshToken: true,
    persistSession: true,
    detectSessionInUrl: true,
    headers: DEFAULT_HEADERS,
    flowType: 'implicit',
    debug: false,
    hasCustomAuthorizationHeader: false,
};
|
|
/**
 * Fallback lock implementation that performs no locking at all: the critical
 * section `fn` is invoked immediately. `name` and `acquireTimeout` are
 * accepted for interface compatibility and ignored.
 */
async function lockNoOp(name, acquireTimeout, fn) {
    const result = await fn();
    return result;
}
|
|
/**
 * Caches JWKS values for all clients created in the same environment. This is
 * especially useful for shared-memory execution environments such as Vercel's
 * Fluid Compute, AWS Lambda or Supabase's Edge Functions. Regardless of how
 * many clients are created, if they share the same storage key they will use
 * the same JWKS cache, significantly speeding up getClaims() with asymmetric
 * JWTs.
 */
// Keyed by storageKey; entries hold { jwks, cachedAt } (see the jwks accessors).
const GLOBAL_JWKS = {};
|
|
class GoTrueClient {
|
|
    /**
     * Create a new client for use in the browser.
     *
     * @param options Partial configuration; merged over DEFAULT_OPTIONS.
     */
    constructor(options) {
        var _a, _b;
        /**
         * @experimental
         */
        this.userStorage = null;
        this.memoryStorage = null;
        this.stateChangeEmitters = new Map();
        this.autoRefreshTicker = null;
        this.visibilityChangedCallback = null;
        this.refreshingDeferred = null;
        /**
         * Keeps track of the async client initialization.
         * When null or not yet resolved the auth state is `unknown`
         * Once resolved the auth state is known and it's safe to call any further client methods.
         * Take extra care to never reject or throw uncaught errors
         */
        this.initializePromise = null;
        this.detectSessionInUrl = true;
        this.hasCustomAuthorizationHeader = false;
        this.suppressGetSessionWarning = false;
        this.lockAcquired = false;
        this.pendingInLock = [];
        /**
         * Used to broadcast state change events to other tabs listening.
         */
        this.broadcastChannel = null;
        this.logger = console.log;
        // Each client gets a sequential ID; a second browser client under the
        // same storage key can behave unpredictably, hence the warning below.
        this.instanceID = GoTrueClient.nextInstanceID;
        GoTrueClient.nextInstanceID += 1;
        if (this.instanceID > 0 && isBrowser()) {
            console.warn('Multiple GoTrueClient instances detected in the same browser context. It is not an error, but this should be avoided as it may produce undefined behavior when used concurrently under the same storage key.');
        }
        const settings = Object.assign(Object.assign({}, DEFAULT_OPTIONS), options);
        // `debug` may be a boolean (enable logging) or a function (custom logger).
        this.logDebugMessages = !!settings.debug;
        if (typeof settings.debug === 'function') {
            this.logger = settings.debug;
        }
        this.persistSession = settings.persistSession;
        this.storageKey = settings.storageKey;
        this.autoRefreshToken = settings.autoRefreshToken;
        this.admin = new GoTrueAdminApi({
            url: settings.url,
            headers: settings.headers,
            fetch: settings.fetch,
        });
        this.url = settings.url;
        this.headers = settings.headers;
        this.fetch = resolveFetch(settings.fetch);
        // NOTE: this assignment is immediately superseded by the if/else chain
        // below, which picks the final lock implementation.
        this.lock = settings.lock || lockNoOp;
        this.detectSessionInUrl = settings.detectSessionInUrl;
        this.flowType = settings.flowType;
        this.hasCustomAuthorizationHeader = settings.hasCustomAuthorizationHeader;
        if (settings.lock) {
            // Caller-supplied lock wins.
            this.lock = settings.lock;
        }
        else if (isBrowser() && ((_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.navigator) === null || _a === void 0 ? void 0 : _a.locks)) {
            // Browser with Web Locks API: use the Navigator LockManager lock.
            this.lock = navigatorLock;
        }
        else {
            // No locking available: run critical sections unguarded.
            this.lock = lockNoOp;
        }
        if (!this.jwks) {
            // Seed the shared JWKS cache entry for this storage key.
            this.jwks = { keys: [] };
            this.jwks_cached_at = Number.MIN_SAFE_INTEGER;
        }
        this.mfa = {
            verify: this._verify.bind(this),
            enroll: this._enroll.bind(this),
            unenroll: this._unenroll.bind(this),
            challenge: this._challenge.bind(this),
            listFactors: this._listFactors.bind(this),
            challengeAndVerify: this._challengeAndVerify.bind(this),
            getAuthenticatorAssuranceLevel: this._getAuthenticatorAssuranceLevel.bind(this),
        };
        // Storage selection: explicit storage > localStorage > in-memory fallback.
        if (this.persistSession) {
            if (settings.storage) {
                this.storage = settings.storage;
            }
            else {
                if (supportsLocalStorage()) {
                    this.storage = globalThis.localStorage;
                }
                else {
                    this.memoryStorage = {};
                    this.storage = memoryLocalStorageAdapter(this.memoryStorage);
                }
            }
            if (settings.userStorage) {
                this.userStorage = settings.userStorage;
            }
        }
        else {
            // persistSession === false: always keep the session in memory only.
            this.memoryStorage = {};
            this.storage = memoryLocalStorageAdapter(this.memoryStorage);
        }
        // Cross-tab session sync via BroadcastChannel, when available.
        if (isBrowser() && globalThis.BroadcastChannel && this.persistSession && this.storageKey) {
            try {
                this.broadcastChannel = new globalThis.BroadcastChannel(this.storageKey);
            }
            catch (e) {
                console.error('Failed to create a new BroadcastChannel, multi-tab state changes will not be available', e);
            }
            (_b = this.broadcastChannel) === null || _b === void 0 ? void 0 : _b.addEventListener('message', async (event) => {
                this._debug('received broadcast notification from other tab or client', event);
                await this._notifyAllSubscribers(event.data.event, event.data.session, false); // broadcast = false so we don't get an endless loop of messages
            });
        }
        // Fire-and-forget: initialize() caches its promise in initializePromise
        // and is designed never to reject (see _initialize).
        this.initialize();
    }
|
|
/**
|
|
* The JWKS used for verifying asymmetric JWTs
|
|
*/
|
|
get jwks() {
|
|
var _a, _b;
|
|
return (_b = (_a = GLOBAL_JWKS[this.storageKey]) === null || _a === void 0 ? void 0 : _a.jwks) !== null && _b !== void 0 ? _b : { keys: [] };
|
|
}
|
|
set jwks(value) {
|
|
GLOBAL_JWKS[this.storageKey] = Object.assign(Object.assign({}, GLOBAL_JWKS[this.storageKey]), { jwks: value });
|
|
}
|
|
get jwks_cached_at() {
|
|
var _a, _b;
|
|
return (_b = (_a = GLOBAL_JWKS[this.storageKey]) === null || _a === void 0 ? void 0 : _a.cachedAt) !== null && _b !== void 0 ? _b : Number.MIN_SAFE_INTEGER;
|
|
}
|
|
set jwks_cached_at(value) {
|
|
GLOBAL_JWKS[this.storageKey] = Object.assign(Object.assign({}, GLOBAL_JWKS[this.storageKey]), { cachedAt: value });
|
|
}
|
|
_debug(...args) {
|
|
if (this.logDebugMessages) {
|
|
this.logger(`GoTrueClient@${this.instanceID} (${version}) ${new Date().toISOString()}`, ...args);
|
|
}
|
|
return this;
|
|
}
|
|
    /**
     * Initializes the client session either from the url or from storage.
     * This method is automatically called when instantiating the client, but should also be called
     * manually when checking for an error from an auth redirect (oauth, magiclink, password recovery, etc).
     */
    async initialize() {
        // Initialization runs at most once; later callers await the cached promise.
        if (this.initializePromise) {
            return await this.initializePromise;
        }
        // Cache the promise synchronously (before awaiting) so concurrent calls
        // all join the same in-flight initialization.
        this.initializePromise = (async () => {
            // -1 = wait indefinitely for the exclusive client lock.
            return await this._acquireLock(-1, async () => {
                return await this._initialize();
            });
        })();
        return await this.initializePromise;
    }
|
|
    /**
     * IMPORTANT:
     * 1. Never throw in this method, as it is called from the constructor
     * 2. Never return a session from this method as it would be cached over
     *    the whole lifetime of the client
     *
     * @returns `{ error }` — error is null on success; never throws.
     */
    async _initialize() {
        var _a;
        try {
            const params = parseParametersFromURL(window.location.href);
            // Classify the current URL: implicit-grant callback, PKCE callback, or neither.
            let callbackUrlType = 'none';
            if (this._isImplicitGrantCallback(params)) {
                callbackUrlType = 'implicit';
            }
            else if (await this._isPKCECallback(params)) {
                callbackUrlType = 'pkce';
            }
            /**
             * Attempt to get the session from the URL only if these conditions are fulfilled
             *
             * Note: If the URL isn't one of the callback url types (implicit or pkce),
             * then there could be an existing session so we don't want to prematurely remove it
             */
            if (isBrowser() && this.detectSessionInUrl && callbackUrlType !== 'none') {
                const { data, error } = await this._getSessionFromURL(params, callbackUrlType);
                if (error) {
                    this._debug('#_initialize()', 'error detecting session from URL', error);
                    if (isAuthImplicitGrantRedirectError(error)) {
                        const errorCode = (_a = error.details) === null || _a === void 0 ? void 0 : _a.code;
                        // Identity-linking errors keep the existing session intact.
                        if (errorCode === 'identity_already_exists' ||
                            errorCode === 'identity_not_found' ||
                            errorCode === 'single_identity_not_deletable') {
                            return { error };
                        }
                    }
                    // failed login attempt via url,
                    // remove old session as in verifyOtp, signUp and signInWith*
                    await this._removeSession();
                    return { error };
                }
                const { session, redirectType } = data;
                this._debug('#_initialize()', 'detected session in URL', session, 'redirect type', redirectType);
                await this._saveSession(session);
                // Notify on the next tick so constructor-time subscribers are registered.
                setTimeout(async () => {
                    if (redirectType === 'recovery') {
                        await this._notifyAllSubscribers('PASSWORD_RECOVERY', session);
                    }
                    else {
                        await this._notifyAllSubscribers('SIGNED_IN', session);
                    }
                }, 0);
                return { error: null };
            }
            // no login attempt via callback url try to recover session from storage
            await this._recoverAndRefresh();
            return { error: null };
        }
        catch (error) {
            if (isAuthError(error)) {
                return { error };
            }
            // Wrap anything unexpected so this method never throws (see IMPORTANT above).
            return {
                error: new AuthUnknownError('Unexpected error during initialization', error),
            };
        }
        finally {
            await this._handleVisibilityChange();
            this._debug('#_initialize()', 'end');
        }
    }
|
|
    /**
     * Creates a new anonymous user.
     *
     * @param credentials Optional; supports `options.data` (user metadata) and
     *                    `options.captchaToken`.
     * @returns A session where the is_anonymous claim in the access token JWT set to true
     */
    async signInAnonymously(credentials) {
        var _a, _b, _c;
        try {
            const res = await _request(this.fetch, 'POST', `${this.url}/signup`, {
                headers: this.headers,
                body: {
                    data: (_b = (_a = credentials === null || credentials === void 0 ? void 0 : credentials.options) === null || _a === void 0 ? void 0 : _a.data) !== null && _b !== void 0 ? _b : {},
                    gotrue_meta_security: { captcha_token: (_c = credentials === null || credentials === void 0 ? void 0 : credentials.options) === null || _c === void 0 ? void 0 : _c.captchaToken },
                },
                xform: _sessionResponse,
            });
            const { data, error } = res;
            if (error || !data) {
                return { data: { user: null, session: null }, error: error };
            }
            const session = data.session;
            const user = data.user;
            // Persist and broadcast only when the server actually returned a session.
            if (data.session) {
                await this._saveSession(data.session);
                await this._notifyAllSubscribers('SIGNED_IN', session);
            }
            return { data: { user, session }, error: null };
        }
        catch (error) {
            if (isAuthError(error)) {
                return { data: { user: null, session: null }, error };
            }
            throw error;
        }
    }
|
|
    /**
     * Creates a new user.
     *
     * Be aware that if a user account exists in the system you may get back an
     * error message that attempts to hide this information from the user.
     * This method has support for PKCE via email signups. The PKCE flow cannot be used when autoconfirm is enabled.
     *
     * @param credentials Either `{ email, password, options }` or `{ phone, password, options }`.
     * @returns A logged-in session if the server has "autoconfirm" ON
     * @returns A user if the server has "autoconfirm" OFF
     * @throws AuthInvalidCredentialsError if neither email nor phone is provided.
     */
    async signUp(credentials) {
        var _a, _b, _c;
        try {
            let res;
            if ('email' in credentials) {
                const { email, password, options } = credentials;
                // PKCE only applies to email signups; phone signups skip the challenge.
                let codeChallenge = null;
                let codeChallengeMethod = null;
                if (this.flowType === 'pkce') {
                    ;
                    [codeChallenge, codeChallengeMethod] = await getCodeChallengeAndMethod(this.storage, this.storageKey);
                }
                res = await _request(this.fetch, 'POST', `${this.url}/signup`, {
                    headers: this.headers,
                    redirectTo: options === null || options === void 0 ? void 0 : options.emailRedirectTo,
                    body: {
                        email,
                        password,
                        data: (_a = options === null || options === void 0 ? void 0 : options.data) !== null && _a !== void 0 ? _a : {},
                        gotrue_meta_security: { captcha_token: options === null || options === void 0 ? void 0 : options.captchaToken },
                        code_challenge: codeChallenge,
                        code_challenge_method: codeChallengeMethod,
                    },
                    xform: _sessionResponse,
                });
            }
            else if ('phone' in credentials) {
                const { phone, password, options } = credentials;
                res = await _request(this.fetch, 'POST', `${this.url}/signup`, {
                    headers: this.headers,
                    body: {
                        phone,
                        password,
                        data: (_b = options === null || options === void 0 ? void 0 : options.data) !== null && _b !== void 0 ? _b : {},
                        channel: (_c = options === null || options === void 0 ? void 0 : options.channel) !== null && _c !== void 0 ? _c : 'sms',
                        gotrue_meta_security: { captcha_token: options === null || options === void 0 ? void 0 : options.captchaToken },
                    },
                    xform: _sessionResponse,
                });
            }
            else {
                throw new AuthInvalidCredentialsError('You must provide either an email or phone number and a password');
            }
            const { data, error } = res;
            if (error || !data) {
                return { data: { user: null, session: null }, error: error };
            }
            const session = data.session;
            const user = data.user;
            // A session is only present when autoconfirm is enabled on the server.
            if (data.session) {
                await this._saveSession(data.session);
                await this._notifyAllSubscribers('SIGNED_IN', session);
            }
            return { data: { user, session }, error: null };
        }
        catch (error) {
            if (isAuthError(error)) {
                return { data: { user: null, session: null }, error };
            }
            throw error;
        }
    }
|
|
    /**
     * Log in an existing user with an email and password or phone and password.
     *
     * Be aware that you may get back an error message that will not distinguish
     * between the cases where the account does not exist or that the
     * email/phone and password combination is wrong or that the account can only
     * be accessed via social login.
     *
     * @param credentials Either `{ email, password, options }` or `{ phone, password, options }`.
     * @throws AuthInvalidCredentialsError if neither email nor phone is provided.
     */
    async signInWithPassword(credentials) {
        try {
            let res;
            if ('email' in credentials) {
                const { email, password, options } = credentials;
                res = await _request(this.fetch, 'POST', `${this.url}/token?grant_type=password`, {
                    headers: this.headers,
                    body: {
                        email,
                        password,
                        gotrue_meta_security: { captcha_token: options === null || options === void 0 ? void 0 : options.captchaToken },
                    },
                    xform: _sessionResponsePassword,
                });
            }
            else if ('phone' in credentials) {
                const { phone, password, options } = credentials;
                res = await _request(this.fetch, 'POST', `${this.url}/token?grant_type=password`, {
                    headers: this.headers,
                    body: {
                        phone,
                        password,
                        gotrue_meta_security: { captcha_token: options === null || options === void 0 ? void 0 : options.captchaToken },
                    },
                    xform: _sessionResponsePassword,
                });
            }
            else {
                throw new AuthInvalidCredentialsError('You must provide either an email or phone number and a password');
            }
            const { data, error } = res;
            if (error) {
                return { data: { user: null, session: null }, error };
            }
            else if (!data || !data.session || !data.user) {
                // The server responded without a usable session/user pair.
                return { data: { user: null, session: null }, error: new AuthInvalidTokenResponseError() };
            }
            if (data.session) {
                await this._saveSession(data.session);
                await this._notifyAllSubscribers('SIGNED_IN', data.session);
            }
            return {
                // weak_password is only surfaced when the server flags the password.
                data: Object.assign({ user: data.user, session: data.session }, (data.weak_password ? { weakPassword: data.weak_password } : null)),
                error,
            };
        }
        catch (error) {
            if (isAuthError(error)) {
                return { data: { user: null, session: null }, error };
            }
            throw error;
        }
    }
|
|
/**
|
|
* Log in an existing user via a third-party provider.
|
|
* This method supports the PKCE flow.
|
|
*/
|
|
async signInWithOAuth(credentials) {
|
|
var _a, _b, _c, _d;
|
|
return await this._handleProviderSignIn(credentials.provider, {
|
|
redirectTo: (_a = credentials.options) === null || _a === void 0 ? void 0 : _a.redirectTo,
|
|
scopes: (_b = credentials.options) === null || _b === void 0 ? void 0 : _b.scopes,
|
|
queryParams: (_c = credentials.options) === null || _c === void 0 ? void 0 : _c.queryParams,
|
|
skipBrowserRedirect: (_d = credentials.options) === null || _d === void 0 ? void 0 : _d.skipBrowserRedirect,
|
|
});
|
|
}
|
|
/**
|
|
* Log in an existing user by exchanging an Auth Code issued during the PKCE flow.
|
|
*/
|
|
async exchangeCodeForSession(authCode) {
|
|
await this.initializePromise;
|
|
return this._acquireLock(-1, async () => {
|
|
return this._exchangeCodeForSession(authCode);
|
|
});
|
|
}
|
|
/**
|
|
* Signs in a user by verifying a message signed by the user's private key.
|
|
* Only Solana supported at this time, using the Sign in with Solana standard.
|
|
*/
|
|
async signInWithWeb3(credentials) {
|
|
const { chain } = credentials;
|
|
if (chain === 'solana') {
|
|
return await this.signInWithSolana(credentials);
|
|
}
|
|
throw new Error(`@supabase/auth-js: Unsupported chain "${chain}"`);
|
|
}
|
|
async signInWithSolana(credentials) {
|
|
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m;
|
|
let message;
|
|
let signature;
|
|
if ('message' in credentials) {
|
|
message = credentials.message;
|
|
signature = credentials.signature;
|
|
}
|
|
else {
|
|
const { chain, wallet, statement, options } = credentials;
|
|
let resolvedWallet;
|
|
if (!isBrowser()) {
|
|
if (typeof wallet !== 'object' || !(options === null || options === void 0 ? void 0 : options.url)) {
|
|
throw new Error('@supabase/auth-js: Both wallet and url must be specified in non-browser environments.');
|
|
}
|
|
resolvedWallet = wallet;
|
|
}
|
|
else if (typeof wallet === 'object') {
|
|
resolvedWallet = wallet;
|
|
}
|
|
else {
|
|
const windowAny = window;
|
|
if ('solana' in windowAny &&
|
|
typeof windowAny.solana === 'object' &&
|
|
(('signIn' in windowAny.solana && typeof windowAny.solana.signIn === 'function') ||
|
|
('signMessage' in windowAny.solana &&
|
|
typeof windowAny.solana.signMessage === 'function'))) {
|
|
resolvedWallet = windowAny.solana;
|
|
}
|
|
else {
|
|
throw new Error(`@supabase/auth-js: No compatible Solana wallet interface on the window object (window.solana) detected. Make sure the user already has a wallet installed and connected for this app. Prefer passing the wallet interface object directly to signInWithWeb3({ chain: 'solana', wallet: resolvedUserWallet }) instead.`);
|
|
}
|
|
}
|
|
const url = new URL((_a = options === null || options === void 0 ? void 0 : options.url) !== null && _a !== void 0 ? _a : window.location.href);
|
|
if ('signIn' in resolvedWallet && resolvedWallet.signIn) {
|
|
const output = await resolvedWallet.signIn(Object.assign(Object.assign(Object.assign({ issuedAt: new Date().toISOString() }, options === null || options === void 0 ? void 0 : options.signInWithSolana), {
|
|
// non-overridable properties
|
|
version: '1', domain: url.host, uri: url.href }), (statement ? { statement } : null)));
|
|
let outputToProcess;
|
|
if (Array.isArray(output) && output[0] && typeof output[0] === 'object') {
|
|
outputToProcess = output[0];
|
|
}
|
|
else if (output &&
|
|
typeof output === 'object' &&
|
|
'signedMessage' in output &&
|
|
'signature' in output) {
|
|
outputToProcess = output;
|
|
}
|
|
else {
|
|
throw new Error('@supabase/auth-js: Wallet method signIn() returned unrecognized value');
|
|
}
|
|
if ('signedMessage' in outputToProcess &&
|
|
'signature' in outputToProcess &&
|
|
(typeof outputToProcess.signedMessage === 'string' ||
|
|
outputToProcess.signedMessage instanceof Uint8Array) &&
|
|
outputToProcess.signature instanceof Uint8Array) {
|
|
message =
|
|
typeof outputToProcess.signedMessage === 'string'
|
|
? outputToProcess.signedMessage
|
|
: new TextDecoder().decode(outputToProcess.signedMessage);
|
|
signature = outputToProcess.signature;
|
|
}
|
|
else {
|
|
throw new Error('@supabase/auth-js: Wallet method signIn() API returned object without signedMessage and signature fields');
|
|
}
|
|
}
|
|
else {
|
|
if (!('signMessage' in resolvedWallet) ||
|
|
typeof resolvedWallet.signMessage !== 'function' ||
|
|
!('publicKey' in resolvedWallet) ||
|
|
typeof resolvedWallet !== 'object' ||
|
|
!resolvedWallet.publicKey ||
|
|
!('toBase58' in resolvedWallet.publicKey) ||
|
|
typeof resolvedWallet.publicKey.toBase58 !== 'function') {
|
|
throw new Error('@supabase/auth-js: Wallet does not have a compatible signMessage() and publicKey.toBase58() API');
|
|
}
|
|
message = [
|
|
`${url.host} wants you to sign in with your Solana account:`,
|
|
resolvedWallet.publicKey.toBase58(),
|
|
...(statement ? ['', statement, ''] : ['']),
|
|
'Version: 1',
|
|
`URI: ${url.href}`,
|
|
`Issued At: ${(_c = (_b = options === null || options === void 0 ? void 0 : options.signInWithSolana) === null || _b === void 0 ? void 0 : _b.issuedAt) !== null && _c !== void 0 ? _c : new Date().toISOString()}`,
|
|
...(((_d = options === null || options === void 0 ? void 0 : options.signInWithSolana) === null || _d === void 0 ? void 0 : _d.notBefore)
|
|
? [`Not Before: ${options.signInWithSolana.notBefore}`]
|
|
: []),
|
|
...(((_e = options === null || options === void 0 ? void 0 : options.signInWithSolana) === null || _e === void 0 ? void 0 : _e.expirationTime)
|
|
? [`Expiration Time: ${options.signInWithSolana.expirationTime}`]
|
|
: []),
|
|
...(((_f = options === null || options === void 0 ? void 0 : options.signInWithSolana) === null || _f === void 0 ? void 0 : _f.chainId)
|
|
? [`Chain ID: ${options.signInWithSolana.chainId}`]
|
|
: []),
|
|
...(((_g = options === null || options === void 0 ? void 0 : options.signInWithSolana) === null || _g === void 0 ? void 0 : _g.nonce) ? [`Nonce: ${options.signInWithSolana.nonce}`] : []),
|
|
...(((_h = options === null || options === void 0 ? void 0 : options.signInWithSolana) === null || _h === void 0 ? void 0 : _h.requestId)
|
|
? [`Request ID: ${options.signInWithSolana.requestId}`]
|
|
: []),
|
|
...(((_k = (_j = options === null || options === void 0 ? void 0 : options.signInWithSolana) === null || _j === void 0 ? void 0 : _j.resources) === null || _k === void 0 ? void 0 : _k.length)
|
|
? [
|
|
'Resources',
|
|
...options.signInWithSolana.resources.map((resource) => `- ${resource}`),
|
|
]
|
|
: []),
|
|
].join('\n');
|
|
const maybeSignature = await resolvedWallet.signMessage(new TextEncoder().encode(message), 'utf8');
|
|
if (!maybeSignature || !(maybeSignature instanceof Uint8Array)) {
|
|
throw new Error('@supabase/auth-js: Wallet signMessage() API returned an recognized value');
|
|
}
|
|
signature = maybeSignature;
|
|
}
|
|
}
|
|
try {
|
|
const { data, error } = await _request(this.fetch, 'POST', `${this.url}/token?grant_type=web3`, {
|
|
headers: this.headers,
|
|
body: Object.assign({ chain: 'solana', message, signature: bytesToBase64URL(signature) }, (((_l = credentials.options) === null || _l === void 0 ? void 0 : _l.captchaToken)
|
|
? { gotrue_meta_security: { captcha_token: (_m = credentials.options) === null || _m === void 0 ? void 0 : _m.captchaToken } }
|
|
: null)),
|
|
xform: _sessionResponse,
|
|
});
|
|
if (error) {
|
|
throw error;
|
|
}
|
|
if (!data || !data.session || !data.user) {
|
|
return {
|
|
data: { user: null, session: null },
|
|
error: new AuthInvalidTokenResponseError(),
|
|
};
|
|
}
|
|
if (data.session) {
|
|
await this._saveSession(data.session);
|
|
await this._notifyAllSubscribers('SIGNED_IN', data.session);
|
|
}
|
|
return { data: Object.assign({}, data), error };
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: { user: null, session: null }, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
 * Exchanges a PKCE auth code for a session.
 *
 * Reads the code verifier persisted when the PKCE flow began (stored as
 * "<verifier>/<redirectType>") and POSTs it together with the auth code to
 * the token endpoint. The verifier is single-use and is removed from storage
 * once the request has returned — even when the server reported an error —
 * but not if the request itself threw before returning.
 *
 * @param authCode the `code` query parameter from the redirect URL
 * @returns `{ data: { user, session, redirectType }, error }`
 */
async _exchangeCodeForSession(authCode) {
    const storageItem = await getItemAsync(this.storage, `${this.storageKey}-code-verifier`);
    // Split "<verifier>/<redirectType>"; redirectType may be undefined.
    const [codeVerifier, redirectType] = (storageItem !== null && storageItem !== void 0 ? storageItem : '').split('/');
    try {
        const { data, error } = await _request(this.fetch, 'POST', `${this.url}/token?grant_type=pkce`, {
            headers: this.headers,
            body: {
                auth_code: authCode,
                code_verifier: codeVerifier,
            },
            xform: _sessionResponse,
        });
        // Remove the verifier before inspecting the result: it must never be
        // reused regardless of whether the exchange succeeded.
        await removeItemAsync(this.storage, `${this.storageKey}-code-verifier`);
        if (error) {
            throw error;
        }
        if (!data || !data.session || !data.user) {
            return {
                data: { user: null, session: null, redirectType: null },
                error: new AuthInvalidTokenResponseError(),
            };
        }
        if (data.session) {
            await this._saveSession(data.session);
            await this._notifyAllSubscribers('SIGNED_IN', data.session);
        }
        return { data: Object.assign(Object.assign({}, data), { redirectType: redirectType !== null && redirectType !== void 0 ? redirectType : null }), error };
    }
    catch (error) {
        // Auth errors are returned to the caller; anything else is rethrown.
        if (isAuthError(error)) {
            return { data: { user: null, session: null, redirectType: null }, error };
        }
        throw error;
    }
}
/**
|
|
* Allows signing in with an OIDC ID token. The authentication provider used
|
|
* should be enabled and configured.
|
|
*/
|
|
async signInWithIdToken(credentials) {
|
|
try {
|
|
const { options, provider, token, access_token, nonce } = credentials;
|
|
const res = await _request(this.fetch, 'POST', `${this.url}/token?grant_type=id_token`, {
|
|
headers: this.headers,
|
|
body: {
|
|
provider,
|
|
id_token: token,
|
|
access_token,
|
|
nonce,
|
|
gotrue_meta_security: { captcha_token: options === null || options === void 0 ? void 0 : options.captchaToken },
|
|
},
|
|
xform: _sessionResponse,
|
|
});
|
|
const { data, error } = res;
|
|
if (error) {
|
|
return { data: { user: null, session: null }, error };
|
|
}
|
|
else if (!data || !data.session || !data.user) {
|
|
return {
|
|
data: { user: null, session: null },
|
|
error: new AuthInvalidTokenResponseError(),
|
|
};
|
|
}
|
|
if (data.session) {
|
|
await this._saveSession(data.session);
|
|
await this._notifyAllSubscribers('SIGNED_IN', data.session);
|
|
}
|
|
return { data, error };
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: { user: null, session: null }, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
|
|
* Log in a user using magiclink or a one-time password (OTP).
|
|
*
|
|
* If the `{{ .ConfirmationURL }}` variable is specified in the email template, a magiclink will be sent.
|
|
* If the `{{ .Token }}` variable is specified in the email template, an OTP will be sent.
|
|
* If you're using phone sign-ins, only an OTP will be sent. You won't be able to send a magiclink for phone sign-ins.
|
|
*
|
|
* Be aware that you may get back an error message that will not distinguish
|
|
* between the cases where the account does not exist or, that the account
|
|
* can only be accessed via social login.
|
|
*
|
|
* Do note that you will need to configure a Whatsapp sender on Twilio
|
|
* if you are using phone sign in with the 'whatsapp' channel. The whatsapp
|
|
* channel is not supported on other providers
|
|
* at this time.
|
|
* This method supports PKCE when an email is passed.
|
|
*/
|
|
async signInWithOtp(credentials) {
|
|
var _a, _b, _c, _d, _e;
|
|
try {
|
|
if ('email' in credentials) {
|
|
const { email, options } = credentials;
|
|
let codeChallenge = null;
|
|
let codeChallengeMethod = null;
|
|
if (this.flowType === 'pkce') {
|
|
;
|
|
[codeChallenge, codeChallengeMethod] = await getCodeChallengeAndMethod(this.storage, this.storageKey);
|
|
}
|
|
const { error } = await _request(this.fetch, 'POST', `${this.url}/otp`, {
|
|
headers: this.headers,
|
|
body: {
|
|
email,
|
|
data: (_a = options === null || options === void 0 ? void 0 : options.data) !== null && _a !== void 0 ? _a : {},
|
|
create_user: (_b = options === null || options === void 0 ? void 0 : options.shouldCreateUser) !== null && _b !== void 0 ? _b : true,
|
|
gotrue_meta_security: { captcha_token: options === null || options === void 0 ? void 0 : options.captchaToken },
|
|
code_challenge: codeChallenge,
|
|
code_challenge_method: codeChallengeMethod,
|
|
},
|
|
redirectTo: options === null || options === void 0 ? void 0 : options.emailRedirectTo,
|
|
});
|
|
return { data: { user: null, session: null }, error };
|
|
}
|
|
if ('phone' in credentials) {
|
|
const { phone, options } = credentials;
|
|
const { data, error } = await _request(this.fetch, 'POST', `${this.url}/otp`, {
|
|
headers: this.headers,
|
|
body: {
|
|
phone,
|
|
data: (_c = options === null || options === void 0 ? void 0 : options.data) !== null && _c !== void 0 ? _c : {},
|
|
create_user: (_d = options === null || options === void 0 ? void 0 : options.shouldCreateUser) !== null && _d !== void 0 ? _d : true,
|
|
gotrue_meta_security: { captcha_token: options === null || options === void 0 ? void 0 : options.captchaToken },
|
|
channel: (_e = options === null || options === void 0 ? void 0 : options.channel) !== null && _e !== void 0 ? _e : 'sms',
|
|
},
|
|
});
|
|
return { data: { user: null, session: null, messageId: data === null || data === void 0 ? void 0 : data.message_id }, error };
|
|
}
|
|
throw new AuthInvalidCredentialsError('You must provide either an email or phone number.');
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: { user: null, session: null }, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
|
|
* Log in a user given a User supplied OTP or TokenHash received through mobile or email.
|
|
*/
|
|
async verifyOtp(params) {
|
|
var _a, _b;
|
|
try {
|
|
let redirectTo = undefined;
|
|
let captchaToken = undefined;
|
|
if ('options' in params) {
|
|
redirectTo = (_a = params.options) === null || _a === void 0 ? void 0 : _a.redirectTo;
|
|
captchaToken = (_b = params.options) === null || _b === void 0 ? void 0 : _b.captchaToken;
|
|
}
|
|
const { data, error } = await _request(this.fetch, 'POST', `${this.url}/verify`, {
|
|
headers: this.headers,
|
|
body: Object.assign(Object.assign({}, params), { gotrue_meta_security: { captcha_token: captchaToken } }),
|
|
redirectTo,
|
|
xform: _sessionResponse,
|
|
});
|
|
if (error) {
|
|
throw error;
|
|
}
|
|
if (!data) {
|
|
throw new Error('An error occurred on token verification.');
|
|
}
|
|
const session = data.session;
|
|
const user = data.user;
|
|
if (session === null || session === void 0 ? void 0 : session.access_token) {
|
|
await this._saveSession(session);
|
|
await this._notifyAllSubscribers(params.type == 'recovery' ? 'PASSWORD_RECOVERY' : 'SIGNED_IN', session);
|
|
}
|
|
return { data: { user, session }, error: null };
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: { user: null, session: null }, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
|
|
* Attempts a single-sign on using an enterprise Identity Provider. A
|
|
* successful SSO attempt will redirect the current page to the identity
|
|
* provider authorization page. The redirect URL is implementation and SSO
|
|
* protocol specific.
|
|
*
|
|
* You can use it by providing a SSO domain. Typically you can extract this
|
|
* domain by asking users for their email address. If this domain is
|
|
* registered on the Auth instance the redirect will use that organization's
|
|
* currently active SSO Identity Provider for the login.
|
|
*
|
|
* If you have built an organization-specific login page, you can use the
|
|
* organization's SSO Identity Provider UUID directly instead.
|
|
*/
|
|
async signInWithSSO(params) {
|
|
var _a, _b, _c;
|
|
try {
|
|
let codeChallenge = null;
|
|
let codeChallengeMethod = null;
|
|
if (this.flowType === 'pkce') {
|
|
;
|
|
[codeChallenge, codeChallengeMethod] = await getCodeChallengeAndMethod(this.storage, this.storageKey);
|
|
}
|
|
return await _request(this.fetch, 'POST', `${this.url}/sso`, {
|
|
body: Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({}, ('providerId' in params ? { provider_id: params.providerId } : null)), ('domain' in params ? { domain: params.domain } : null)), { redirect_to: (_b = (_a = params.options) === null || _a === void 0 ? void 0 : _a.redirectTo) !== null && _b !== void 0 ? _b : undefined }), (((_c = params === null || params === void 0 ? void 0 : params.options) === null || _c === void 0 ? void 0 : _c.captchaToken)
|
|
? { gotrue_meta_security: { captcha_token: params.options.captchaToken } }
|
|
: null)), { skip_http_redirect: true, code_challenge: codeChallenge, code_challenge_method: codeChallengeMethod }),
|
|
headers: this.headers,
|
|
xform: _ssoResponse,
|
|
});
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: null, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
|
|
* Sends a reauthentication OTP to the user's email or phone number.
|
|
* Requires the user to be signed-in.
|
|
*/
|
|
async reauthenticate() {
|
|
await this.initializePromise;
|
|
return await this._acquireLock(-1, async () => {
|
|
return await this._reauthenticate();
|
|
});
|
|
}
|
|
async _reauthenticate() {
|
|
try {
|
|
return await this._useSession(async (result) => {
|
|
const { data: { session }, error: sessionError, } = result;
|
|
if (sessionError)
|
|
throw sessionError;
|
|
if (!session)
|
|
throw new AuthSessionMissingError();
|
|
const { error } = await _request(this.fetch, 'GET', `${this.url}/reauthenticate`, {
|
|
headers: this.headers,
|
|
jwt: session.access_token,
|
|
});
|
|
return { data: { user: null, session: null }, error };
|
|
});
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: { user: null, session: null }, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
|
|
* Resends an existing signup confirmation email, email change email, SMS OTP or phone change OTP.
|
|
*/
|
|
async resend(credentials) {
|
|
try {
|
|
const endpoint = `${this.url}/resend`;
|
|
if ('email' in credentials) {
|
|
const { email, type, options } = credentials;
|
|
const { error } = await _request(this.fetch, 'POST', endpoint, {
|
|
headers: this.headers,
|
|
body: {
|
|
email,
|
|
type,
|
|
gotrue_meta_security: { captcha_token: options === null || options === void 0 ? void 0 : options.captchaToken },
|
|
},
|
|
redirectTo: options === null || options === void 0 ? void 0 : options.emailRedirectTo,
|
|
});
|
|
return { data: { user: null, session: null }, error };
|
|
}
|
|
else if ('phone' in credentials) {
|
|
const { phone, type, options } = credentials;
|
|
const { data, error } = await _request(this.fetch, 'POST', endpoint, {
|
|
headers: this.headers,
|
|
body: {
|
|
phone,
|
|
type,
|
|
gotrue_meta_security: { captcha_token: options === null || options === void 0 ? void 0 : options.captchaToken },
|
|
},
|
|
});
|
|
return { data: { user: null, session: null, messageId: data === null || data === void 0 ? void 0 : data.message_id }, error };
|
|
}
|
|
throw new AuthInvalidCredentialsError('You must provide either an email or phone number and a type');
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: { user: null, session: null }, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
|
|
* Returns the session, refreshing it if necessary.
|
|
*
|
|
* The session returned can be null if the session is not detected which can happen in the event a user is not signed-in or has logged out.
|
|
*
|
|
* **IMPORTANT:** This method loads values directly from the storage attached
|
|
* to the client. If that storage is based on request cookies for example,
|
|
* the values in it may not be authentic and therefore it's strongly advised
|
|
* against using this method and its results in such circumstances. A warning
|
|
* will be emitted if this is detected. Use {@link #getUser()} instead.
|
|
*/
|
|
async getSession() {
|
|
await this.initializePromise;
|
|
const result = await this._acquireLock(-1, async () => {
|
|
return this._useSession(async (result) => {
|
|
return result;
|
|
});
|
|
});
|
|
return result;
|
|
}
|
|
/**
 * Acquires a global lock based on the storage key.
 *
 * Reentrant: when the lock is already held by this client, `fn` is queued
 * behind the last pending in-lock operation instead of acquiring again.
 * When acquiring fresh, the lock is held until `fn` and every operation
 * queued while it ran have settled.
 *
 * @param acquireTimeout forwarded to `this.lock`; -1 means wait indefinitely
 * @param fn async operation to run while the lock is held
 * @returns the value resolved by `fn` (its rejection propagates to the caller)
 */
async _acquireLock(acquireTimeout, fn) {
    this._debug('#_acquireLock', 'begin', acquireTimeout);
    try {
        if (this.lockAcquired) {
            // Reentrant path: chain fn after the most recently queued
            // operation so in-lock work stays serialized.
            const last = this.pendingInLock.length
                ? this.pendingInLock[this.pendingInLock.length - 1]
                : Promise.resolve();
            const result = (async () => {
                await last;
                return await fn();
            })();
            // Track settlement only — errors are observed by the caller via
            // `result`, not by the queue.
            this.pendingInLock.push((async () => {
                try {
                    await result;
                }
                catch (e) {
                    // we just care if it finished
                }
            })());
            return result;
        }
        return await this.lock(`lock:${this.storageKey}`, acquireTimeout, async () => {
            this._debug('#_acquireLock', 'lock acquired for storage key', this.storageKey);
            try {
                this.lockAcquired = true;
                const result = fn();
                this.pendingInLock.push((async () => {
                    try {
                        await result;
                    }
                    catch (e) {
                        // we just care if it finished
                    }
                })());
                await result;
                // keep draining the queue until there's nothing to wait on
                while (this.pendingInLock.length) {
                    const waitOn = [...this.pendingInLock];
                    await Promise.all(waitOn);
                    this.pendingInLock.splice(0, waitOn.length);
                }
                // `result` has already settled; awaiting again just unwraps it.
                return await result;
            }
            finally {
                this._debug('#_acquireLock', 'lock released for storage key', this.storageKey);
                this.lockAcquired = false;
            }
        });
    }
    finally {
        this._debug('#_acquireLock', 'end');
    }
}
/**
|
|
* Use instead of {@link #getSession} inside the library. It is
|
|
* semantically usually what you want, as getting a session involves some
|
|
* processing afterwards that requires only one client operating on the
|
|
* session at once across multiple tabs or processes.
|
|
*/
|
|
async _useSession(fn) {
|
|
this._debug('#_useSession', 'begin');
|
|
try {
|
|
// the use of __loadSession here is the only correct use of the function!
|
|
const result = await this.__loadSession();
|
|
return await fn(result);
|
|
}
|
|
finally {
|
|
this._debug('#_useSession', 'end');
|
|
}
|
|
}
|
|
/**
 * NEVER USE DIRECTLY!
 *
 * Always use {@link #_useSession}.
 *
 * Loads the session from storage, validates it, refreshes it when it is at
 * (or within the expiry margin of) expiry, optionally merges the user from a
 * separate user storage, and — on server-side storages — wraps the session in
 * a Proxy that warns when the unauthenticated `user` object is accessed.
 *
 * @returns `{ data: { session }, error }` where session may be null
 */
async __loadSession() {
    this._debug('#__loadSession()', 'begin');
    if (!this.lockAcquired) {
        // Diagnostic only: calling outside the lock risks racing refreshes.
        this._debug('#__loadSession()', 'used outside of an acquired lock!', new Error().stack);
    }
    try {
        let currentSession = null;
        const maybeSession = await getItemAsync(this.storage, this.storageKey);
        this._debug('#getSession()', 'session from storage', maybeSession);
        if (maybeSession !== null) {
            if (this._isValidSession(maybeSession)) {
                currentSession = maybeSession;
            }
            else {
                // Corrupt/unexpected shape in storage — drop it entirely.
                this._debug('#getSession()', 'session from storage is not valid');
                await this._removeSession();
            }
        }
        if (!currentSession) {
            return { data: { session: null }, error: null };
        }
        // A session is considered expired before the access token _actually_
        // expires. When the autoRefreshToken option is off (or when the tab is
        // in the background), very eager users of getSession() -- like
        // realtime-js -- might send a valid JWT which will expire by the time it
        // reaches the server.
        const hasExpired = currentSession.expires_at
            ? currentSession.expires_at * 1000 - Date.now() < EXPIRY_MARGIN_MS
            : false;
        this._debug('#__loadSession()', `session has${hasExpired ? '' : ' not'} expired`, 'expires_at', currentSession.expires_at);
        if (!hasExpired) {
            if (this.userStorage) {
                // The user object lives in a separate storage; merge it in, or
                // substitute a placeholder proxy when it is unavailable.
                const maybeUser = (await getItemAsync(this.userStorage, this.storageKey + '-user'));
                if (maybeUser === null || maybeUser === void 0 ? void 0 : maybeUser.user) {
                    currentSession.user = maybeUser.user;
                }
                else {
                    currentSession.user = userNotAvailableProxy();
                }
            }
            if (this.storage.isServer && currentSession.user) {
                // Server-side storage (e.g. cookies) is not authenticated data:
                // warn once per client the first time `user` is read.
                let suppressWarning = this.suppressGetSessionWarning;
                const proxySession = new Proxy(currentSession, {
                    get: (target, prop, receiver) => {
                        if (!suppressWarning && prop === 'user') {
                            // only show warning when the user object is being accessed from the server
                            console.warn('Using the user object as returned from supabase.auth.getSession() or from some supabase.auth.onAuthStateChange() events could be insecure! This value comes directly from the storage medium (usually cookies on the server) and may not be authentic. Use supabase.auth.getUser() instead which authenticates the data by contacting the Supabase Auth server.');
                            suppressWarning = true; // keeps this proxy instance from logging additional warnings
                            this.suppressGetSessionWarning = true; // keeps this client's future proxy instances from warning
                        }
                        return Reflect.get(target, prop, receiver);
                    },
                });
                currentSession = proxySession;
            }
            return { data: { session: currentSession }, error: null };
        }
        // Session is (nearly) expired — refresh it before handing it out.
        const { session, error } = await this._callRefreshToken(currentSession.refresh_token);
        if (error) {
            return { data: { session: null }, error };
        }
        return { data: { session }, error: null };
    }
    finally {
        this._debug('#__loadSession()', 'end');
    }
}
/**
|
|
* Gets the current user details if there is an existing session. This method
|
|
* performs a network request to the Supabase Auth server, so the returned
|
|
* value is authentic and can be used to base authorization rules on.
|
|
*
|
|
* @param jwt Takes in an optional access token JWT. If no JWT is provided, the JWT from the current session is used.
|
|
*/
|
|
async getUser(jwt) {
|
|
if (jwt) {
|
|
return await this._getUser(jwt);
|
|
}
|
|
await this.initializePromise;
|
|
const result = await this._acquireLock(-1, async () => {
|
|
return await this._getUser();
|
|
});
|
|
return result;
|
|
}
|
|
async _getUser(jwt) {
|
|
try {
|
|
if (jwt) {
|
|
return await _request(this.fetch, 'GET', `${this.url}/user`, {
|
|
headers: this.headers,
|
|
jwt: jwt,
|
|
xform: _userResponse,
|
|
});
|
|
}
|
|
return await this._useSession(async (result) => {
|
|
var _a, _b, _c;
|
|
const { data, error } = result;
|
|
if (error) {
|
|
throw error;
|
|
}
|
|
// returns an error if there is no access_token or custom authorization header
|
|
if (!((_a = data.session) === null || _a === void 0 ? void 0 : _a.access_token) && !this.hasCustomAuthorizationHeader) {
|
|
return { data: { user: null }, error: new AuthSessionMissingError() };
|
|
}
|
|
return await _request(this.fetch, 'GET', `${this.url}/user`, {
|
|
headers: this.headers,
|
|
jwt: (_c = (_b = data.session) === null || _b === void 0 ? void 0 : _b.access_token) !== null && _c !== void 0 ? _c : undefined,
|
|
xform: _userResponse,
|
|
});
|
|
});
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
if (isAuthSessionMissingError(error)) {
|
|
// JWT contains a `session_id` which does not correspond to an active
|
|
// session in the database, indicating the user is signed out.
|
|
await this._removeSession();
|
|
await removeItemAsync(this.storage, `${this.storageKey}-code-verifier`);
|
|
}
|
|
return { data: { user: null }, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
|
|
* Updates user data for a logged in user.
|
|
*/
|
|
async updateUser(attributes, options = {}) {
|
|
await this.initializePromise;
|
|
return await this._acquireLock(-1, async () => {
|
|
return await this._updateUser(attributes, options);
|
|
});
|
|
}
|
|
async _updateUser(attributes, options = {}) {
|
|
try {
|
|
return await this._useSession(async (result) => {
|
|
const { data: sessionData, error: sessionError } = result;
|
|
if (sessionError) {
|
|
throw sessionError;
|
|
}
|
|
if (!sessionData.session) {
|
|
throw new AuthSessionMissingError();
|
|
}
|
|
const session = sessionData.session;
|
|
let codeChallenge = null;
|
|
let codeChallengeMethod = null;
|
|
if (this.flowType === 'pkce' && attributes.email != null) {
|
|
;
|
|
[codeChallenge, codeChallengeMethod] = await getCodeChallengeAndMethod(this.storage, this.storageKey);
|
|
}
|
|
const { data, error: userError } = await _request(this.fetch, 'PUT', `${this.url}/user`, {
|
|
headers: this.headers,
|
|
redirectTo: options === null || options === void 0 ? void 0 : options.emailRedirectTo,
|
|
body: Object.assign(Object.assign({}, attributes), { code_challenge: codeChallenge, code_challenge_method: codeChallengeMethod }),
|
|
jwt: session.access_token,
|
|
xform: _userResponse,
|
|
});
|
|
if (userError)
|
|
throw userError;
|
|
session.user = data.user;
|
|
await this._saveSession(session);
|
|
await this._notifyAllSubscribers('USER_UPDATED', session);
|
|
return { data: { user: session.user }, error: null };
|
|
});
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: { user: null }, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
|
|
* Sets the session data from the current session. If the current session is expired, setSession will take care of refreshing it to obtain a new session.
|
|
* If the refresh token or access token in the current session is invalid, an error will be thrown.
|
|
* @param currentSession The current session that minimally contains an access token and refresh token.
|
|
*/
|
|
async setSession(currentSession) {
|
|
await this.initializePromise;
|
|
return await this._acquireLock(-1, async () => {
|
|
return await this._setSession(currentSession);
|
|
});
|
|
}
|
|
/**
 * Lock-free inner implementation of {@link #setSession}.
 *
 * Decodes the access token to determine expiry: an expired token is traded
 * for a fresh session via the refresh token; a still-valid token is verified
 * against the server (`_getUser`) before a session object is reconstructed,
 * saved and announced with SIGNED_IN.
 *
 * @param currentSession must contain `access_token` and `refresh_token`
 * @returns `{ data: { user, session }, error }`
 */
async _setSession(currentSession) {
    try {
        if (!currentSession.access_token || !currentSession.refresh_token) {
            throw new AuthSessionMissingError();
        }
        const timeNow = Date.now() / 1000;
        // Default to "expired now" when the JWT carries no exp claim.
        let expiresAt = timeNow;
        let hasExpired = true;
        let session = null;
        const { payload } = decodeJWT(currentSession.access_token);
        if (payload.exp) {
            expiresAt = payload.exp;
            hasExpired = expiresAt <= timeNow;
        }
        if (hasExpired) {
            // Token already expired: only the refresh token can produce a session.
            const { session: refreshedSession, error } = await this._callRefreshToken(currentSession.refresh_token);
            if (error) {
                return { data: { user: null, session: null }, error: error };
            }
            if (!refreshedSession) {
                return { data: { user: null, session: null }, error: null };
            }
            session = refreshedSession;
        }
        else {
            // Token still valid: authenticate it server-side, then rebuild the
            // session object around it.
            const { data, error } = await this._getUser(currentSession.access_token);
            if (error) {
                throw error;
            }
            session = {
                access_token: currentSession.access_token,
                refresh_token: currentSession.refresh_token,
                user: data.user,
                token_type: 'bearer',
                expires_in: expiresAt - timeNow,
                expires_at: expiresAt,
            };
            await this._saveSession(session);
            await this._notifyAllSubscribers('SIGNED_IN', session);
        }
        return { data: { user: session.user, session }, error: null };
    }
    catch (error) {
        if (isAuthError(error)) {
            return { data: { session: null, user: null }, error };
        }
        throw error;
    }
}
/**
|
|
* Returns a new session, regardless of expiry status.
|
|
* Takes in an optional current session. If not passed in, then refreshSession() will attempt to retrieve it from getSession().
|
|
* If the current session's refresh token is invalid, an error will be thrown.
|
|
* @param currentSession The current session. If passed in, it must contain a refresh token.
|
|
*/
|
|
async refreshSession(currentSession) {
|
|
await this.initializePromise;
|
|
return await this._acquireLock(-1, async () => {
|
|
return await this._refreshSession(currentSession);
|
|
});
|
|
}
|
|
async _refreshSession(currentSession) {
|
|
try {
|
|
return await this._useSession(async (result) => {
|
|
var _a;
|
|
if (!currentSession) {
|
|
const { data, error } = result;
|
|
if (error) {
|
|
throw error;
|
|
}
|
|
currentSession = (_a = data.session) !== null && _a !== void 0 ? _a : undefined;
|
|
}
|
|
if (!(currentSession === null || currentSession === void 0 ? void 0 : currentSession.refresh_token)) {
|
|
throw new AuthSessionMissingError();
|
|
}
|
|
const { session, error } = await this._callRefreshToken(currentSession.refresh_token);
|
|
if (error) {
|
|
return { data: { user: null, session: null }, error: error };
|
|
}
|
|
if (!session) {
|
|
return { data: { user: null, session: null }, error: null };
|
|
}
|
|
return { data: { user: session.user, session }, error: null };
|
|
});
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: { user: null, session: null }, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
 * Gets the session data from a URL string.
 *
 * Handles both PKCE redirects (exchanges the `code` parameter for a session
 * and strips it from the URL) and implicit-grant redirects (reconstructs the
 * session from the token fragment parameters, sanity-checks the expiry
 * timestamps against the local clock, and clears the URL hash).
 *
 * @param params parsed URL parameters (query and/or fragment)
 * @param callbackUrlType 'implicit' | 'pkce' | other — detected redirect kind
 * @returns `{ data: { session, redirectType }, error }`
 */
async _getSessionFromURL(params, callbackUrlType) {
    try {
        if (!isBrowser())
            throw new AuthImplicitGrantRedirectError('No browser detected.');
        // If there's an error in the URL, it doesn't matter what flow it is, we just return the error.
        if (params.error || params.error_description || params.error_code) {
            // The error class returned implies that the redirect is from an implicit grant flow
            // but it could also be from a redirect error from a PKCE flow.
            throw new AuthImplicitGrantRedirectError(params.error_description || 'Error in URL with unspecified error_description', {
                error: params.error || 'unspecified_error',
                code: params.error_code || 'unspecified_code',
            });
        }
        // Checks for mismatches between the flowType initialised in the client and the URL parameters
        switch (callbackUrlType) {
            case 'implicit':
                if (this.flowType === 'pkce') {
                    throw new AuthPKCEGrantCodeExchangeError('Not a valid PKCE flow url.');
                }
                break;
            case 'pkce':
                if (this.flowType === 'implicit') {
                    throw new AuthImplicitGrantRedirectError('Not a valid implicit grant flow url.');
                }
                break;
            default:
            // there's no mismatch so we continue
        }
        // Since this is a redirect for PKCE, we attempt to retrieve the code from the URL for the code exchange
        if (callbackUrlType === 'pkce') {
            this._debug('#_initialize()', 'begin', 'is PKCE flow', true);
            if (!params.code)
                throw new AuthPKCEGrantCodeExchangeError('No code detected.');
            const { data, error } = await this._exchangeCodeForSession(params.code);
            if (error)
                throw error;
            // Strip the single-use code from the address bar without reloading.
            const url = new URL(window.location.href);
            url.searchParams.delete('code');
            window.history.replaceState(window.history.state, '', url.toString());
            return { data: { session: data.session, redirectType: null }, error: null };
        }
        // Implicit grant: the session material arrives directly in the URL.
        const { provider_token, provider_refresh_token, access_token, refresh_token, expires_in, expires_at, token_type, } = params;
        if (!access_token || !expires_in || !refresh_token || !token_type) {
            throw new AuthImplicitGrantRedirectError('No session defined in URL');
        }
        const timeNow = Math.round(Date.now() / 1000);
        const expiresIn = parseInt(expires_in);
        // Prefer the absolute expires_at when present; otherwise derive it.
        let expiresAt = timeNow + expiresIn;
        if (expires_at) {
            expiresAt = parseInt(expires_at);
        }
        const actuallyExpiresIn = expiresAt - timeNow;
        if (actuallyExpiresIn * 1000 <= AUTO_REFRESH_TICK_DURATION_MS) {
            console.warn(`@supabase/gotrue-js: Session as retrieved from URL expires in ${actuallyExpiresIn}s, should have been closer to ${expiresIn}s`);
        }
        // Heuristics for stale URLs and clock skew (issuedAt derived from expiry).
        const issuedAt = expiresAt - expiresIn;
        if (timeNow - issuedAt >= 120) {
            console.warn('@supabase/gotrue-js: Session as retrieved from URL was issued over 120s ago, URL could be stale', issuedAt, expiresAt, timeNow);
        }
        else if (timeNow - issuedAt < 0) {
            console.warn('@supabase/gotrue-js: Session as retrieved from URL was issued in the future? Check the device clock for skew', issuedAt, expiresAt, timeNow);
        }
        // Authenticate the token with the server before trusting it.
        const { data, error } = await this._getUser(access_token);
        if (error)
            throw error;
        const session = {
            provider_token,
            provider_refresh_token,
            access_token,
            expires_in: expiresIn,
            expires_at: expiresAt,
            refresh_token,
            token_type,
            user: data.user,
        };
        // Remove tokens from URL
        window.location.hash = '';
        this._debug('#_getSessionFromURL()', 'clearing window.location.hash');
        return { data: { session, redirectType: params.type }, error: null };
    }
    catch (error) {
        if (isAuthError(error)) {
            return { data: { session: null, redirectType: null }, error };
        }
        throw error;
    }
}
/**
|
|
* Checks if the current URL contains parameters given by an implicit oauth grant flow (https://www.rfc-editor.org/rfc/rfc6749.html#section-4.2)
|
|
*/
|
|
_isImplicitGrantCallback(params) {
|
|
return Boolean(params.access_token || params.error_description);
|
|
}
|
|
/**
|
|
* Checks if the current URL and backing storage contain parameters given by a PKCE flow
|
|
*/
|
|
async _isPKCECallback(params) {
|
|
const currentStorageContent = await getItemAsync(this.storage, `${this.storageKey}-code-verifier`);
|
|
return !!(params.code && currentStorageContent);
|
|
}
|
|
/**
|
|
* Inside a browser context, `signOut()` will remove the logged in user from the browser session and log them out - removing all items from localstorage and then trigger a `"SIGNED_OUT"` event.
|
|
*
|
|
* For server-side management, you can revoke all refresh tokens for a user by passing a user's JWT through to `auth.api.signOut(JWT: string)`.
|
|
* There is no way to revoke a user's access token jwt until it expires. It is recommended to set a shorter expiry on the jwt for this reason.
|
|
*
|
|
* If using `others` scope, no `SIGNED_OUT` event is fired!
|
|
*/
|
|
async signOut(options = { scope: 'global' }) {
|
|
await this.initializePromise;
|
|
return await this._acquireLock(-1, async () => {
|
|
return await this._signOut(options);
|
|
});
|
|
}
|
|
async _signOut({ scope } = { scope: 'global' }) {
|
|
return await this._useSession(async (result) => {
|
|
var _a;
|
|
const { data, error: sessionError } = result;
|
|
if (sessionError) {
|
|
return { error: sessionError };
|
|
}
|
|
const accessToken = (_a = data.session) === null || _a === void 0 ? void 0 : _a.access_token;
|
|
if (accessToken) {
|
|
const { error } = await this.admin.signOut(accessToken, scope);
|
|
if (error) {
|
|
// ignore 404s since user might not exist anymore
|
|
// ignore 401s since an invalid or expired JWT should sign out the current session
|
|
if (!(isAuthApiError(error) &&
|
|
(error.status === 404 || error.status === 401 || error.status === 403))) {
|
|
return { error };
|
|
}
|
|
}
|
|
}
|
|
if (scope !== 'others') {
|
|
await this._removeSession();
|
|
await removeItemAsync(this.storage, `${this.storageKey}-code-verifier`);
|
|
}
|
|
return { error: null };
|
|
});
|
|
}
|
|
/**
|
|
* Receive a notification every time an auth event happens.
|
|
* @param callback A callback function to be invoked when an auth event happens.
|
|
*/
|
|
onAuthStateChange(callback) {
|
|
const id = uuid();
|
|
const subscription = {
|
|
id,
|
|
callback,
|
|
unsubscribe: () => {
|
|
this._debug('#unsubscribe()', 'state change callback with id removed', id);
|
|
this.stateChangeEmitters.delete(id);
|
|
},
|
|
};
|
|
this._debug('#onAuthStateChange()', 'registered callback with id', id);
|
|
this.stateChangeEmitters.set(id, subscription);
|
|
(async () => {
|
|
await this.initializePromise;
|
|
await this._acquireLock(-1, async () => {
|
|
this._emitInitialSession(id);
|
|
});
|
|
})();
|
|
return { data: { subscription } };
|
|
}
|
|
async _emitInitialSession(id) {
|
|
return await this._useSession(async (result) => {
|
|
var _a, _b;
|
|
try {
|
|
const { data: { session }, error, } = result;
|
|
if (error)
|
|
throw error;
|
|
await ((_a = this.stateChangeEmitters.get(id)) === null || _a === void 0 ? void 0 : _a.callback('INITIAL_SESSION', session));
|
|
this._debug('INITIAL_SESSION', 'callback id', id, 'session', session);
|
|
}
|
|
catch (err) {
|
|
await ((_b = this.stateChangeEmitters.get(id)) === null || _b === void 0 ? void 0 : _b.callback('INITIAL_SESSION', null));
|
|
this._debug('INITIAL_SESSION', 'callback id', id, 'error', err);
|
|
console.error(err);
|
|
}
|
|
});
|
|
}
|
|
/**
|
|
* Sends a password reset request to an email address. This method supports the PKCE flow.
|
|
*
|
|
* @param email The email address of the user.
|
|
* @param options.redirectTo The URL to send the user to after they click the password reset link.
|
|
* @param options.captchaToken Verification token received when the user completes the captcha on the site.
|
|
*/
|
|
async resetPasswordForEmail(email, options = {}) {
|
|
let codeChallenge = null;
|
|
let codeChallengeMethod = null;
|
|
if (this.flowType === 'pkce') {
|
|
[codeChallenge, codeChallengeMethod] = await getCodeChallengeAndMethod(this.storage, this.storageKey, true // isPasswordRecovery
|
|
);
|
|
}
|
|
try {
|
|
return await _request(this.fetch, 'POST', `${this.url}/recover`, {
|
|
body: {
|
|
email,
|
|
code_challenge: codeChallenge,
|
|
code_challenge_method: codeChallengeMethod,
|
|
gotrue_meta_security: { captcha_token: options.captchaToken },
|
|
},
|
|
headers: this.headers,
|
|
redirectTo: options.redirectTo,
|
|
});
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: null, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
|
|
* Gets all the identities linked to a user.
|
|
*/
|
|
async getUserIdentities() {
|
|
var _a;
|
|
try {
|
|
const { data, error } = await this.getUser();
|
|
if (error)
|
|
throw error;
|
|
return { data: { identities: (_a = data.user.identities) !== null && _a !== void 0 ? _a : [] }, error: null };
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: null, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
|
|
* Links an oauth identity to an existing user.
|
|
* This method supports the PKCE flow.
|
|
*/
|
|
async linkIdentity(credentials) {
|
|
var _a;
|
|
try {
|
|
const { data, error } = await this._useSession(async (result) => {
|
|
var _a, _b, _c, _d, _e;
|
|
const { data, error } = result;
|
|
if (error)
|
|
throw error;
|
|
const url = await this._getUrlForProvider(`${this.url}/user/identities/authorize`, credentials.provider, {
|
|
redirectTo: (_a = credentials.options) === null || _a === void 0 ? void 0 : _a.redirectTo,
|
|
scopes: (_b = credentials.options) === null || _b === void 0 ? void 0 : _b.scopes,
|
|
queryParams: (_c = credentials.options) === null || _c === void 0 ? void 0 : _c.queryParams,
|
|
skipBrowserRedirect: true,
|
|
});
|
|
return await _request(this.fetch, 'GET', url, {
|
|
headers: this.headers,
|
|
jwt: (_e = (_d = data.session) === null || _d === void 0 ? void 0 : _d.access_token) !== null && _e !== void 0 ? _e : undefined,
|
|
});
|
|
});
|
|
if (error)
|
|
throw error;
|
|
if (isBrowser() && !((_a = credentials.options) === null || _a === void 0 ? void 0 : _a.skipBrowserRedirect)) {
|
|
window.location.assign(data === null || data === void 0 ? void 0 : data.url);
|
|
}
|
|
return { data: { provider: credentials.provider, url: data === null || data === void 0 ? void 0 : data.url }, error: null };
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: { provider: credentials.provider, url: null }, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
|
|
* Unlinks an identity from a user by deleting it. The user will no longer be able to sign in with that identity once it's unlinked.
|
|
*/
|
|
async unlinkIdentity(identity) {
|
|
try {
|
|
return await this._useSession(async (result) => {
|
|
var _a, _b;
|
|
const { data, error } = result;
|
|
if (error) {
|
|
throw error;
|
|
}
|
|
return await _request(this.fetch, 'DELETE', `${this.url}/user/identities/${identity.identity_id}`, {
|
|
headers: this.headers,
|
|
jwt: (_b = (_a = data.session) === null || _a === void 0 ? void 0 : _a.access_token) !== null && _b !== void 0 ? _b : undefined,
|
|
});
|
|
});
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: null, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
 * Generates a new JWT.
 * @param refreshToken A valid refresh token that was returned on login.
 */
async _refreshAccessToken(refreshToken) {
    // Only a short prefix of the token is ever logged.
    const debugName = `#_refreshAccessToken(${refreshToken.substring(0, 5)}...)`;
    this._debug(debugName, 'begin');
    try {
        const startedAt = Date.now();
        // will attempt to refresh the token with exponential backoff
        return await retryable(async (attempt) => {
            if (attempt > 0) {
                await sleep(200 * Math.pow(2, attempt - 1)); // 200, 400, 800, ...
            }
            this._debug(debugName, 'refreshing attempt', attempt);
            return await _request(this.fetch, 'POST', `${this.url}/token?grant_type=refresh_token`, {
                body: { refresh_token: refreshToken },
                headers: this.headers,
                xform: _sessionResponse,
            });
        }, (attempt, error) => {
            const nextBackOffInterval = 200 * Math.pow(2, attempt);
            // retryable only if the error is a retryable fetch error AND the
            // request can be sent before the backoff overflows the tick duration
            return (error &&
                isAuthRetryableFetchError(error) &&
                // retryable only if the request can be sent before the backoff overflows the tick duration
                Date.now() + nextBackOffInterval - startedAt < AUTO_REFRESH_TICK_DURATION_MS);
        });
    }
    catch (error) {
        this._debug(debugName, 'error', error);
        // Auth errors are returned as values; anything else propagates.
        if (isAuthError(error)) {
            return { data: { session: null, user: null }, error };
        }
        throw error;
    }
    finally {
        this._debug(debugName, 'end');
    }
}
|
|
_isValidSession(maybeSession) {
|
|
const isValidSession = typeof maybeSession === 'object' &&
|
|
maybeSession !== null &&
|
|
'access_token' in maybeSession &&
|
|
'refresh_token' in maybeSession &&
|
|
'expires_at' in maybeSession;
|
|
return isValidSession;
|
|
}
|
|
async _handleProviderSignIn(provider, options) {
|
|
const url = await this._getUrlForProvider(`${this.url}/authorize`, provider, {
|
|
redirectTo: options.redirectTo,
|
|
scopes: options.scopes,
|
|
queryParams: options.queryParams,
|
|
});
|
|
this._debug('#_handleProviderSignIn()', 'provider', provider, 'options', options, 'url', url);
|
|
// try to open on the browser
|
|
if (isBrowser() && !options.skipBrowserRedirect) {
|
|
window.location.assign(url);
|
|
}
|
|
return { data: { provider, url }, error: null };
|
|
}
|
|
/**
 * Recovers the session from LocalStorage and refreshes the token
 * Note: this method is async to accommodate for AsyncStorage e.g. in React native.
 */
async _recoverAndRefresh() {
    var _a, _b;
    const debugName = '#_recoverAndRefresh()';
    this._debug(debugName, 'begin');
    try {
        const currentSession = (await getItemAsync(this.storage, this.storageKey));
        if (currentSession && this.userStorage) {
            // A separate user storage is configured: the user object is kept
            // under `<storageKey>-user` in userStorage rather than inside the
            // session record.
            let maybeUser = (await getItemAsync(this.userStorage, this.storageKey + '-user'));
            if (!this.storage.isServer && Object.is(this.storage, this.userStorage) && !maybeUser) {
                // storage and userStorage are the same storage medium, for example
                // window.localStorage if userStorage does not have the user from
                // storage stored, store it first thereby migrating the user object
                // from storage -> userStorage
                maybeUser = { user: currentSession.user };
                await setItemAsync(this.userStorage, this.storageKey + '-user', maybeUser);
            }
            // Fall back to a "user not available" proxy when no user was found.
            currentSession.user = (_a = maybeUser === null || maybeUser === void 0 ? void 0 : maybeUser.user) !== null && _a !== void 0 ? _a : userNotAvailableProxy();
        }
        else if (currentSession && !currentSession.user) {
            // user storage is not set, let's check if it was previously enabled so
            // we bring back the storage as it should be
            if (!currentSession.user) {
                // test if userStorage was previously enabled and the storage medium was the same, to move the user back under the same key
                const separateUser = (await getItemAsync(this.storage, this.storageKey + '-user'));
                if (separateUser && (separateUser === null || separateUser === void 0 ? void 0 : separateUser.user)) {
                    // Migrate the user object back into the session record.
                    currentSession.user = separateUser.user;
                    await removeItemAsync(this.storage, this.storageKey + '-user');
                    await setItemAsync(this.storage, this.storageKey, currentSession);
                }
                else {
                    currentSession.user = userNotAvailableProxy();
                }
            }
        }
        this._debug(debugName, 'session from storage', currentSession);
        if (!this._isValidSession(currentSession)) {
            this._debug(debugName, 'session is not valid');
            if (currentSession !== null) {
                await this._removeSession();
            }
            return;
        }
        // NOTE(review): EXPIRY_MARGIN_MS is in milliseconds, but the debug
        // message below prints it with an 's' suffix — confirm intended units.
        const expiresWithMargin = ((_b = currentSession.expires_at) !== null && _b !== void 0 ? _b : Infinity) * 1000 - Date.now() < EXPIRY_MARGIN_MS;
        this._debug(debugName, `session has${expiresWithMargin ? '' : ' not'} expired with margin of ${EXPIRY_MARGIN_MS}s`);
        if (expiresWithMargin) {
            if (this.autoRefreshToken && currentSession.refresh_token) {
                const { error } = await this._callRefreshToken(currentSession.refresh_token);
                if (error) {
                    console.error(error);
                    if (!isAuthRetryableFetchError(error)) {
                        this._debug(debugName, 'refresh failed with a non-retryable error, removing the session', error);
                        await this._removeSession();
                    }
                }
            }
        }
        else if (currentSession.user &&
            currentSession.user.__isUserNotAvailableProxy === true) {
            // If we have a proxy user, try to get the real user data
            try {
                const { data, error: userError } = await this._getUser(currentSession.access_token);
                if (!userError && (data === null || data === void 0 ? void 0 : data.user)) {
                    currentSession.user = data.user;
                    await this._saveSession(currentSession);
                    await this._notifyAllSubscribers('SIGNED_IN', currentSession);
                }
                else {
                    this._debug(debugName, 'could not get user data, skipping SIGNED_IN notification');
                }
            }
            catch (getUserError) {
                console.error('Error getting user data:', getUserError);
                this._debug(debugName, 'error getting user data, skipping SIGNED_IN notification', getUserError);
            }
        }
        else {
            // no need to persist currentSession again, as we just loaded it from
            // local storage; persisting it again may overwrite a value saved by
            // another client with access to the same local storage
            await this._notifyAllSubscribers('SIGNED_IN', currentSession);
        }
    }
    catch (err) {
        // Recovery is best-effort: log the failure and continue without a session.
        this._debug(debugName, 'error', err);
        console.error(err);
        return;
    }
    finally {
        this._debug(debugName, 'end');
    }
}
|
|
// Refreshes the session with the given refresh token, deduplicating
// concurrent calls through `this.refreshingDeferred` so only one network
// refresh is ever in flight.
async _callRefreshToken(refreshToken) {
    var _a, _b;
    if (!refreshToken) {
        throw new AuthSessionMissingError();
    }
    // refreshing is already in progress
    if (this.refreshingDeferred) {
        // All concurrent callers await the same in-flight refresh.
        return this.refreshingDeferred.promise;
    }
    const debugName = `#_callRefreshToken(${refreshToken.substring(0, 5)}...)`;
    this._debug(debugName, 'begin');
    try {
        this.refreshingDeferred = new Deferred();
        const { data, error } = await this._refreshAccessToken(refreshToken);
        if (error)
            throw error;
        if (!data.session)
            throw new AuthSessionMissingError();
        await this._saveSession(data.session);
        await this._notifyAllSubscribers('TOKEN_REFRESHED', data.session);
        const result = { session: data.session, error: null };
        // Release any callers that piled up while the refresh was in flight.
        this.refreshingDeferred.resolve(result);
        return result;
    }
    catch (error) {
        this._debug(debugName, 'error', error);
        if (isAuthError(error)) {
            const result = { session: null, error };
            // A non-retryable auth error means the session is gone for good.
            if (!isAuthRetryableFetchError(error)) {
                await this._removeSession();
            }
            (_a = this.refreshingDeferred) === null || _a === void 0 ? void 0 : _a.resolve(result);
            return result;
        }
        // Unknown errors propagate to the waiters and the caller alike.
        (_b = this.refreshingDeferred) === null || _b === void 0 ? void 0 : _b.reject(error);
        throw error;
    }
    finally {
        // Always clear the deferred so the next refresh starts fresh.
        this.refreshingDeferred = null;
        this._debug(debugName, 'end');
    }
}
|
|
async _notifyAllSubscribers(event, session, broadcast = true) {
|
|
const debugName = `#_notifyAllSubscribers(${event})`;
|
|
this._debug(debugName, 'begin', session, `broadcast = ${broadcast}`);
|
|
try {
|
|
if (this.broadcastChannel && broadcast) {
|
|
this.broadcastChannel.postMessage({ event, session });
|
|
}
|
|
const errors = [];
|
|
const promises = Array.from(this.stateChangeEmitters.values()).map(async (x) => {
|
|
try {
|
|
await x.callback(event, session);
|
|
}
|
|
catch (e) {
|
|
errors.push(e);
|
|
}
|
|
});
|
|
await Promise.all(promises);
|
|
if (errors.length > 0) {
|
|
for (let i = 0; i < errors.length; i += 1) {
|
|
console.error(errors[i]);
|
|
}
|
|
throw errors[0];
|
|
}
|
|
}
|
|
finally {
|
|
this._debug(debugName, 'end');
|
|
}
|
|
}
|
|
/**
 * set currentSession and currentUser
 * process to _startAutoRefreshToken if possible
 */
async _saveSession(session) {
    this._debug('#_saveSession()', session);
    // _saveSession is always called whenever a new session has been acquired
    // so we can safely suppress the warning returned by future getSession calls
    this.suppressGetSessionWarning = true;
    // Create a shallow copy to work with, to avoid mutating the original session object if it's used elsewhere
    const sessionToProcess = Object.assign({}, session);
    // A "user not available" proxy stands in for the user when it lives only in
    // userStorage; it must never be cloned or persisted to the main storage.
    const userIsProxy = sessionToProcess.user && sessionToProcess.user.__isUserNotAvailableProxy === true;
    if (this.userStorage) {
        if (!userIsProxy && sessionToProcess.user) {
            // If it's a real user object, save it to userStorage.
            await setItemAsync(this.userStorage, this.storageKey + '-user', {
                user: sessionToProcess.user,
            });
        }
        // Prepare the main session data for primary storage: remove the user property before cloning
        // This is important because the original session.user might be the proxy
        const mainSessionData = Object.assign({}, sessionToProcess);
        delete mainSessionData.user; // Remove user (real or proxy) before cloning for main storage
        const clonedMainSessionData = deepClone(mainSessionData);
        await setItemAsync(this.storage, this.storageKey, clonedMainSessionData);
    }
    else {
        // No userStorage is configured.
        // In this case, session.user should ideally not be a proxy.
        // If it were, structuredClone would fail. This implies an issue elsewhere if user is a proxy here
        const clonedSession = deepClone(sessionToProcess); // sessionToProcess still has its original user property
        await setItemAsync(this.storage, this.storageKey, clonedSession);
    }
}
|
|
async _removeSession() {
|
|
this._debug('#_removeSession()');
|
|
await removeItemAsync(this.storage, this.storageKey);
|
|
await removeItemAsync(this.storage, this.storageKey + '-code-verifier');
|
|
await removeItemAsync(this.storage, this.storageKey + '-user');
|
|
if (this.userStorage) {
|
|
await removeItemAsync(this.userStorage, this.storageKey + '-user');
|
|
}
|
|
await this._notifyAllSubscribers('SIGNED_OUT', null);
|
|
}
|
|
/**
|
|
* Removes any registered visibilitychange callback.
|
|
*
|
|
* {@see #startAutoRefresh}
|
|
* {@see #stopAutoRefresh}
|
|
*/
|
|
_removeVisibilityChangedCallback() {
|
|
this._debug('#_removeVisibilityChangedCallback()');
|
|
const callback = this.visibilityChangedCallback;
|
|
this.visibilityChangedCallback = null;
|
|
try {
|
|
if (callback && isBrowser() && (window === null || window === void 0 ? void 0 : window.removeEventListener)) {
|
|
window.removeEventListener('visibilitychange', callback);
|
|
}
|
|
}
|
|
catch (e) {
|
|
console.error('removing visibilitychange callback failed', e);
|
|
}
|
|
}
|
|
/**
 * This is the private implementation of {@link #startAutoRefresh}. Use this
 * within the library.
 */
async _startAutoRefresh() {
    // Restart from a clean state: any previous ticker is cleared first.
    await this._stopAutoRefresh();
    this._debug('#_startAutoRefresh()');
    const ticker = setInterval(() => this._autoRefreshTokenTick(), AUTO_REFRESH_TICK_DURATION_MS);
    this.autoRefreshTicker = ticker;
    if (ticker && typeof ticker === 'object' && typeof ticker.unref === 'function') {
        // ticker is a NodeJS Timeout object that has an `unref` method
        // https://nodejs.org/api/timers.html#timeoutunref
        // When auto refresh is used in NodeJS (like for testing) the
        // `setInterval` is preventing the process from being marked as
        // finished and tests run endlessly. This can be prevented by calling
        // `unref()` on the returned object.
        ticker.unref();
        // @ts-expect-error TS has no context of Deno
    }
    else if (typeof Deno !== 'undefined' && typeof Deno.unrefTimer === 'function') {
        // similar like for NodeJS, but with the Deno API
        // https://deno.land/api@latest?unstable&s=Deno.unrefTimer
        // @ts-expect-error TS has no context of Deno
        Deno.unrefTimer(ticker);
    }
    // run the tick immediately, but in the next pass of the event loop so that
    // #_initialize can be allowed to complete without recursively waiting on
    // itself
    setTimeout(async () => {
        await this.initializePromise;
        await this._autoRefreshTokenTick();
    }, 0);
}
|
|
/**
|
|
* This is the private implementation of {@link #stopAutoRefresh}. Use this
|
|
* within the library.
|
|
*/
|
|
async _stopAutoRefresh() {
|
|
this._debug('#_stopAutoRefresh()');
|
|
const ticker = this.autoRefreshTicker;
|
|
this.autoRefreshTicker = null;
|
|
if (ticker) {
|
|
clearInterval(ticker);
|
|
}
|
|
}
|
|
/**
 * Starts an auto-refresh process in the background. The session is checked
 * every few seconds. Close to the time of expiration a process is started to
 * refresh the session. If refreshing fails it will be retried for as long as
 * necessary.
 *
 * If you set the {@link GoTrueClientOptions#autoRefreshToken} you don't need
 * to call this function, it will be called for you.
 *
 * On browsers the refresh process works only when the tab/window is in the
 * foreground to conserve resources as well as prevent race conditions and
 * flooding auth with requests. If you call this method any managed
 * visibility change callback will be removed and you must manage visibility
 * changes on your own.
 *
 * On non-browser platforms the refresh process works *continuously* in the
 * background, which may not be desirable. You should hook into your
 * platform's foreground indication mechanism and call these methods
 * appropriately to conserve resources.
 *
 * {@see #stopAutoRefresh}
 */
async startAutoRefresh() {
    // Caller takes over visibility management; drop the managed callback.
    this._removeVisibilityChangedCallback();
    await this._startAutoRefresh();
}
|
|
/**
 * Stops an active auto refresh process running in the background (if any).
 *
 * If you call this method any managed visibility change callback will be
 * removed and you must manage visibility changes on your own.
 *
 * See {@link #startAutoRefresh} for more details.
 */
async stopAutoRefresh() {
    // Caller takes over visibility management; drop the managed callback.
    this._removeVisibilityChangedCallback();
    await this._stopAutoRefresh();
}
|
|
/**
 * Runs the auto refresh token tick.
 */
async _autoRefreshTokenTick() {
    this._debug('#_autoRefreshTokenTick()', 'begin');
    try {
        // Lock timeout of 0: if another tick/tab holds the lock, skip this tick.
        await this._acquireLock(0, async () => {
            try {
                const now = Date.now();
                try {
                    return await this._useSession(async (result) => {
                        const { data: { session }, } = result;
                        if (!session || !session.refresh_token || !session.expires_at) {
                            this._debug('#_autoRefreshTokenTick()', 'no session');
                            return;
                        }
                        // session will expire in this many ticks (or has already expired if <= 0)
                        const expiresInTicks = Math.floor((session.expires_at * 1000 - now) / AUTO_REFRESH_TICK_DURATION_MS);
                        this._debug('#_autoRefreshTokenTick()', `access token expires in ${expiresInTicks} ticks, a tick lasts ${AUTO_REFRESH_TICK_DURATION_MS}ms, refresh threshold is ${AUTO_REFRESH_TICK_THRESHOLD} ticks`);
                        if (expiresInTicks <= AUTO_REFRESH_TICK_THRESHOLD) {
                            await this._callRefreshToken(session.refresh_token);
                        }
                    });
                }
                catch (e) {
                    // A failed tick is not fatal; the next tick retries.
                    console.error('Auto refresh tick failed with error. This is likely a transient error.', e);
                }
            }
            finally {
                this._debug('#_autoRefreshTokenTick()', 'end');
            }
        });
    }
    catch (e) {
        // Failing to acquire the lock within the timeout is the expected
        // contention case; anything else is re-thrown.
        if (e.isAcquireTimeout || e instanceof LockAcquireTimeoutError) {
            this._debug('auto refresh token tick lock not available');
        }
        else {
            throw e;
        }
    }
}
|
|
/**
|
|
* Registers callbacks on the browser / platform, which in-turn run
|
|
* algorithms when the browser window/tab are in foreground. On non-browser
|
|
* platforms it assumes always foreground.
|
|
*/
|
|
async _handleVisibilityChange() {
|
|
this._debug('#_handleVisibilityChange()');
|
|
if (!isBrowser() || !(window === null || window === void 0 ? void 0 : window.addEventListener)) {
|
|
if (this.autoRefreshToken) {
|
|
// in non-browser environments the refresh token ticker runs always
|
|
this.startAutoRefresh();
|
|
}
|
|
return false;
|
|
}
|
|
try {
|
|
this.visibilityChangedCallback = async () => await this._onVisibilityChanged(false);
|
|
window === null || window === void 0 ? void 0 : window.addEventListener('visibilitychange', this.visibilityChangedCallback);
|
|
// now immediately call the visbility changed callback to setup with the
|
|
// current visbility state
|
|
await this._onVisibilityChanged(true); // initial call
|
|
}
|
|
catch (error) {
|
|
console.error('_handleVisibilityChange', error);
|
|
}
|
|
}
|
|
/**
|
|
* Callback registered with `window.addEventListener('visibilitychange')`.
|
|
*/
|
|
async _onVisibilityChanged(calledFromInitialize) {
|
|
const methodName = `#_onVisibilityChanged(${calledFromInitialize})`;
|
|
this._debug(methodName, 'visibilityState', document.visibilityState);
|
|
if (document.visibilityState === 'visible') {
|
|
if (this.autoRefreshToken) {
|
|
// in browser environments the refresh token ticker runs only on focused tabs
|
|
// which prevents race conditions
|
|
this._startAutoRefresh();
|
|
}
|
|
if (!calledFromInitialize) {
|
|
// called when the visibility has changed, i.e. the browser
|
|
// transitioned from hidden -> visible so we need to see if the session
|
|
// should be recovered immediately... but to do that we need to acquire
|
|
// the lock first asynchronously
|
|
await this.initializePromise;
|
|
await this._acquireLock(-1, async () => {
|
|
if (document.visibilityState !== 'visible') {
|
|
this._debug(methodName, 'acquired the lock to recover the session, but the browser visibilityState is no longer visible, aborting');
|
|
// visibility has changed while waiting for the lock, abort
|
|
return;
|
|
}
|
|
// recover the session
|
|
await this._recoverAndRefresh();
|
|
});
|
|
}
|
|
}
|
|
else if (document.visibilityState === 'hidden') {
|
|
if (this.autoRefreshToken) {
|
|
this._stopAutoRefresh();
|
|
}
|
|
}
|
|
}
|
|
/**
|
|
* Generates the relevant login URL for a third-party provider.
|
|
* @param options.redirectTo A URL or mobile address to send the user to after they are confirmed.
|
|
* @param options.scopes A space-separated list of scopes granted to the OAuth application.
|
|
* @param options.queryParams An object of key-value pairs containing query parameters granted to the OAuth application.
|
|
*/
|
|
async _getUrlForProvider(url, provider, options) {
|
|
const urlParams = [`provider=${encodeURIComponent(provider)}`];
|
|
if (options === null || options === void 0 ? void 0 : options.redirectTo) {
|
|
urlParams.push(`redirect_to=${encodeURIComponent(options.redirectTo)}`);
|
|
}
|
|
if (options === null || options === void 0 ? void 0 : options.scopes) {
|
|
urlParams.push(`scopes=${encodeURIComponent(options.scopes)}`);
|
|
}
|
|
if (this.flowType === 'pkce') {
|
|
const [codeChallenge, codeChallengeMethod] = await getCodeChallengeAndMethod(this.storage, this.storageKey);
|
|
const flowParams = new URLSearchParams({
|
|
code_challenge: `${encodeURIComponent(codeChallenge)}`,
|
|
code_challenge_method: `${encodeURIComponent(codeChallengeMethod)}`,
|
|
});
|
|
urlParams.push(flowParams.toString());
|
|
}
|
|
if (options === null || options === void 0 ? void 0 : options.queryParams) {
|
|
const query = new URLSearchParams(options.queryParams);
|
|
urlParams.push(query.toString());
|
|
}
|
|
if (options === null || options === void 0 ? void 0 : options.skipBrowserRedirect) {
|
|
urlParams.push(`skip_http_redirect=${options.skipBrowserRedirect}`);
|
|
}
|
|
return `${url}?${urlParams.join('&')}`;
|
|
}
|
|
async _unenroll(params) {
|
|
try {
|
|
return await this._useSession(async (result) => {
|
|
var _a;
|
|
const { data: sessionData, error: sessionError } = result;
|
|
if (sessionError) {
|
|
return { data: null, error: sessionError };
|
|
}
|
|
return await _request(this.fetch, 'DELETE', `${this.url}/factors/${params.factorId}`, {
|
|
headers: this.headers,
|
|
jwt: (_a = sessionData === null || sessionData === void 0 ? void 0 : sessionData.session) === null || _a === void 0 ? void 0 : _a.access_token,
|
|
});
|
|
});
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: null, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
async _enroll(params) {
|
|
try {
|
|
return await this._useSession(async (result) => {
|
|
var _a, _b;
|
|
const { data: sessionData, error: sessionError } = result;
|
|
if (sessionError) {
|
|
return { data: null, error: sessionError };
|
|
}
|
|
const body = Object.assign({ friendly_name: params.friendlyName, factor_type: params.factorType }, (params.factorType === 'phone' ? { phone: params.phone } : { issuer: params.issuer }));
|
|
const { data, error } = await _request(this.fetch, 'POST', `${this.url}/factors`, {
|
|
body,
|
|
headers: this.headers,
|
|
jwt: (_a = sessionData === null || sessionData === void 0 ? void 0 : sessionData.session) === null || _a === void 0 ? void 0 : _a.access_token,
|
|
});
|
|
if (error) {
|
|
return { data: null, error };
|
|
}
|
|
if (params.factorType === 'totp' && ((_b = data === null || data === void 0 ? void 0 : data.totp) === null || _b === void 0 ? void 0 : _b.qr_code)) {
|
|
data.totp.qr_code = `data:image/svg+xml;utf-8,${data.totp.qr_code}`;
|
|
}
|
|
return { data, error: null };
|
|
});
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: null, error };
|
|
}
|
|
throw error;
|
|
}
|
|
}
|
|
/**
|
|
* {@see GoTrueMFAApi#verify}
|
|
*/
|
|
async _verify(params) {
|
|
return this._acquireLock(-1, async () => {
|
|
try {
|
|
return await this._useSession(async (result) => {
|
|
var _a;
|
|
const { data: sessionData, error: sessionError } = result;
|
|
if (sessionError) {
|
|
return { data: null, error: sessionError };
|
|
}
|
|
const { data, error } = await _request(this.fetch, 'POST', `${this.url}/factors/${params.factorId}/verify`, {
|
|
body: { code: params.code, challenge_id: params.challengeId },
|
|
headers: this.headers,
|
|
jwt: (_a = sessionData === null || sessionData === void 0 ? void 0 : sessionData.session) === null || _a === void 0 ? void 0 : _a.access_token,
|
|
});
|
|
if (error) {
|
|
return { data: null, error };
|
|
}
|
|
await this._saveSession(Object.assign({ expires_at: Math.round(Date.now() / 1000) + data.expires_in }, data));
|
|
await this._notifyAllSubscribers('MFA_CHALLENGE_VERIFIED', data);
|
|
return { data, error };
|
|
});
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: null, error };
|
|
}
|
|
throw error;
|
|
}
|
|
});
|
|
}
|
|
/**
|
|
* {@see GoTrueMFAApi#challenge}
|
|
*/
|
|
async _challenge(params) {
|
|
return this._acquireLock(-1, async () => {
|
|
try {
|
|
return await this._useSession(async (result) => {
|
|
var _a;
|
|
const { data: sessionData, error: sessionError } = result;
|
|
if (sessionError) {
|
|
return { data: null, error: sessionError };
|
|
}
|
|
return await _request(this.fetch, 'POST', `${this.url}/factors/${params.factorId}/challenge`, {
|
|
body: { channel: params.channel },
|
|
headers: this.headers,
|
|
jwt: (_a = sessionData === null || sessionData === void 0 ? void 0 : sessionData.session) === null || _a === void 0 ? void 0 : _a.access_token,
|
|
});
|
|
});
|
|
}
|
|
catch (error) {
|
|
if (isAuthError(error)) {
|
|
return { data: null, error };
|
|
}
|
|
throw error;
|
|
}
|
|
});
|
|
}
|
|
/**
|
|
* {@see GoTrueMFAApi#challengeAndVerify}
|
|
*/
|
|
async _challengeAndVerify(params) {
|
|
// both _challenge and _verify independently acquire the lock, so no need
|
|
// to acquire it here
|
|
const { data: challengeData, error: challengeError } = await this._challenge({
|
|
factorId: params.factorId,
|
|
});
|
|
if (challengeError) {
|
|
return { data: null, error: challengeError };
|
|
}
|
|
return await this._verify({
|
|
factorId: params.factorId,
|
|
challengeId: challengeData.id,
|
|
code: params.code,
|
|
});
|
|
}
|
|
/**
|
|
* {@see GoTrueMFAApi#listFactors}
|
|
*/
|
|
async _listFactors() {
|
|
// use #getUser instead of #_getUser as the former acquires a lock
|
|
const { data: { user }, error: userError, } = await this.getUser();
|
|
if (userError) {
|
|
return { data: null, error: userError };
|
|
}
|
|
const factors = (user === null || user === void 0 ? void 0 : user.factors) || [];
|
|
const totp = factors.filter((factor) => factor.factor_type === 'totp' && factor.status === 'verified');
|
|
const phone = factors.filter((factor) => factor.factor_type === 'phone' && factor.status === 'verified');
|
|
return {
|
|
data: {
|
|
all: factors,
|
|
totp,
|
|
phone,
|
|
},
|
|
error: null,
|
|
};
|
|
}
|
|
/**
|
|
* {@see GoTrueMFAApi#getAuthenticatorAssuranceLevel}
|
|
*/
|
|
async _getAuthenticatorAssuranceLevel() {
|
|
return this._acquireLock(-1, async () => {
|
|
return await this._useSession(async (result) => {
|
|
var _a, _b;
|
|
const { data: { session }, error: sessionError, } = result;
|
|
if (sessionError) {
|
|
return { data: null, error: sessionError };
|
|
}
|
|
if (!session) {
|
|
return {
|
|
data: { currentLevel: null, nextLevel: null, currentAuthenticationMethods: [] },
|
|
error: null,
|
|
};
|
|
}
|
|
const { payload } = decodeJWT(session.access_token);
|
|
let currentLevel = null;
|
|
if (payload.aal) {
|
|
currentLevel = payload.aal;
|
|
}
|
|
let nextLevel = currentLevel;
|
|
const verifiedFactors = (_b = (_a = session.user.factors) === null || _a === void 0 ? void 0 : _a.filter((factor) => factor.status === 'verified')) !== null && _b !== void 0 ? _b : [];
|
|
if (verifiedFactors.length > 0) {
|
|
nextLevel = 'aal2';
|
|
}
|
|
const currentAuthenticationMethods = payload.amr || [];
|
|
return { data: { currentLevel, nextLevel, currentAuthenticationMethods }, error: null };
|
|
});
|
|
});
|
|
}
|
|
async fetchJwk(kid, jwks = { keys: [] }) {
|
|
// try fetching from the supplied jwks
|
|
let jwk = jwks.keys.find((key) => key.kid === kid);
|
|
if (jwk) {
|
|
return jwk;
|
|
}
|
|
const now = Date.now();
|
|
// try fetching from cache
|
|
jwk = this.jwks.keys.find((key) => key.kid === kid);
|
|
// jwk exists and jwks isn't stale
|
|
if (jwk && this.jwks_cached_at + JWKS_TTL > now) {
|
|
return jwk;
|
|
}
|
|
// jwk isn't cached in memory so we need to fetch it from the well-known endpoint
|
|
const { data, error } = await _request(this.fetch, 'GET', `${this.url}/.well-known/jwks.json`, {
|
|
headers: this.headers,
|
|
});
|
|
if (error) {
|
|
throw error;
|
|
}
|
|
if (!data.keys || data.keys.length === 0) {
|
|
return null;
|
|
}
|
|
this.jwks = data;
|
|
this.jwks_cached_at = now;
|
|
// Find the signing key
|
|
jwk = data.keys.find((key) => key.kid === kid);
|
|
if (!jwk) {
|
|
return null;
|
|
}
|
|
return jwk;
|
|
}
|
|
/**
|
|
* Extracts the JWT claims present in the access token by first verifying the
|
|
* JWT against the server's JSON Web Key Set endpoint
|
|
* `/.well-known/jwks.json` which is often cached, resulting in significantly
|
|
* faster responses. Prefer this method over {@link #getUser} which always
|
|
* sends a request to the Auth server for each JWT.
|
|
*
|
|
* If the project is not using an asymmetric JWT signing key (like ECC or
|
|
* RSA) it always sends a request to the Auth server (similar to {@link
|
|
* #getUser}) to verify the JWT.
|
|
*
|
|
* @param jwt An optional specific JWT you wish to verify, not the one you
|
|
* can obtain from {@link #getSession}.
|
|
* @param options Various additional options that allow you to customize the
|
|
* behavior of this method.
|
|
*/
|
|
async getClaims(jwt, options = {}) {
    try {
        let token = jwt;
        // Default to the current session's access token when no JWT is given.
        if (!token) {
            const { data, error } = await this.getSession();
            if (error || !data.session) {
                return { data: null, error };
            }
            token = data.session.access_token;
        }
        const { header, payload, signature, raw: { header: rawHeader, payload: rawPayload }, } = decodeJWT(token);
        if (!(options === null || options === void 0 ? void 0 : options.allowExpired)) {
            // Reject expired JWTs should only happen if jwt argument was passed
            validateExp(payload.exp);
        }
        // Resolve an asymmetric signing key; stays null for HS* (symmetric)
        // algorithms, missing `kid`, or environments without WebCrypto.
        const signingKey = !header.alg ||
            header.alg.startsWith('HS') ||
            !header.kid ||
            !('crypto' in globalThis && 'subtle' in globalThis.crypto)
            ? null
            : await this.fetchJwk(header.kid, (options === null || options === void 0 ? void 0 : options.keys) ? { keys: options.keys } : options === null || options === void 0 ? void 0 : options.jwks);
        // If symmetric algorithm or WebCrypto API is unavailable, fallback to getUser()
        if (!signingKey) {
            const { error } = await this.getUser(token);
            if (error) {
                throw error;
            }
            // getUser succeeds so the claims in the JWT can be trusted
            return {
                data: {
                    claims: payload,
                    header,
                    signature,
                },
                error: null,
            };
        }
        const algorithm = getAlgorithm(header.alg);
        // Convert JWK to CryptoKey
        const publicKey = await crypto.subtle.importKey('jwk', signingKey, algorithm, true, [
            'verify',
        ]);
        // Verify the signature
        const isValid = await crypto.subtle.verify(algorithm, publicKey, signature, stringToUint8Array(`${rawHeader}.${rawPayload}`));
        if (!isValid) {
            throw new AuthInvalidJwtError('Invalid JWT signature');
        }
        // If verification succeeds, decode and return claims
        return {
            data: {
                claims: payload,
                header,
                signature,
            },
            error: null,
        };
    }
    catch (error) {
        // Known auth errors become structured results; anything else propagates.
        if (isAuthError(error)) {
            return { data: null, error };
        }
        throw error;
    }
}
|
|
}
|
|
// Class-level counter — presumably incremented per GoTrueClient instance to
// assign unique IDs; the incrementing code lives outside this chunk (verify).
GoTrueClient.nextInstanceID = 0;
|
|
|
|
// Public alias: supabase-js exposes the GoTrue client under the name AuthClient.
const AuthClient = GoTrueClient;
|
|
|
|
/**
 * Auth client used internally by SupabaseClient. Currently a thin subclass
 * of AuthClient (GoTrueClient) that adds no behavior of its own; it exists
 * as a distinct type for the supabase-js integration.
 */
class SupabaseAuthClient extends AuthClient {
    constructor(options) {
        super(options);
    }
}
|
|
|
|
// TypeScript's `__awaiter` downlevel helper: drives an async-function body
// (compiled into a generator) step by step, adopting every yielded value
// into a Promise so that `yield x` behaves like `await x`. Do not edit by
// hand — this shape is emitted by the TypeScript compiler.
var __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
/**
|
|
* Supabase Client.
|
|
*
|
|
* An isomorphic Javascript client for interacting with Postgres.
|
|
*/
|
|
/**
 * Supabase Client.
 *
 * An isomorphic Javascript client for interacting with Postgres.
 */
class SupabaseClient {
    /**
     * Create a new client for use in the browser.
     * @param supabaseUrl The unique Supabase URL which is supplied when you create a new project in your project dashboard.
     * @param supabaseKey The unique Supabase Key which is supplied when you create a new project in your project dashboard.
     * @param options.db.schema You can switch in between schemas. The schema needs to be on the list of exposed schemas inside Supabase.
     * @param options.auth.autoRefreshToken Set to "true" if you want to automatically refresh the token before expiring.
     * @param options.auth.persistSession Set to "true" if you want to automatically save the user session into local storage.
     * @param options.auth.detectSessionInUrl Set to "true" if you want to automatically detect OAuth grants in the URL and sign in the user.
     * @param options.realtime Options passed along to realtime-js constructor.
     * @param options.storage Options passed along to the storage-js constructor.
     * @param options.global.fetch A custom fetch implementation.
     * @param options.global.headers Any additional headers to send with each network request.
     */
    constructor(supabaseUrl, supabaseKey, options) {
        var _a, _b, _c;
        this.supabaseUrl = supabaseUrl;
        this.supabaseKey = supabaseKey;
        if (!supabaseUrl)
            throw new Error('supabaseUrl is required.');
        if (!supabaseKey)
            throw new Error('supabaseKey is required.');
        // Derive per-service endpoints from the project base URL.
        const _supabaseUrl = ensureTrailingSlash(supabaseUrl);
        const baseUrl = new URL(_supabaseUrl);
        this.realtimeUrl = new URL('realtime/v1', baseUrl);
        // Realtime speaks WebSocket: http(s) -> ws(s).
        this.realtimeUrl.protocol = this.realtimeUrl.protocol.replace('http', 'ws');
        this.authUrl = new URL('auth/v1', baseUrl);
        this.storageUrl = new URL('storage/v1', baseUrl);
        this.functionsUrl = new URL('functions/v1', baseUrl);
        // default storage key uses the supabase project ref as a namespace
        const defaultStorageKey = `sb-${baseUrl.hostname.split('.')[0]}-auth-token`;
        const DEFAULTS = {
            db: DEFAULT_DB_OPTIONS,
            realtime: DEFAULT_REALTIME_OPTIONS,
            auth: Object.assign(Object.assign({}, DEFAULT_AUTH_OPTIONS), { storageKey: defaultStorageKey }),
            global: DEFAULT_GLOBAL_OPTIONS,
        };
        const settings = applySettingDefaults(options !== null && options !== void 0 ? options : {}, DEFAULTS);
        this.storageKey = (_a = settings.auth.storageKey) !== null && _a !== void 0 ? _a : '';
        this.headers = (_b = settings.global.headers) !== null && _b !== void 0 ? _b : {};
        if (!settings.accessToken) {
            this.auth = this._initSupabaseAuthClient((_c = settings.auth) !== null && _c !== void 0 ? _c : {}, this.headers, settings.global.fetch);
        }
        else {
            // A custom accessToken provider replaces the built-in auth client;
            // any access of supabase.auth then throws with a clear message.
            this.accessToken = settings.accessToken;
            this.auth = new Proxy({}, {
                get: (_, prop) => {
                    throw new Error(`@supabase/supabase-js: Supabase Client is configured with the accessToken option, accessing supabase.auth.${String(prop)} is not possible`);
                },
            });
        }
        // All sub-clients share this fetch, which injects the auth headers.
        this.fetch = fetchWithAuth(supabaseKey, this._getAccessToken.bind(this), settings.global.fetch);
        this.realtime = this._initRealtimeClient(Object.assign({ headers: this.headers, accessToken: this._getAccessToken.bind(this) }, settings.realtime));
        this.rest = new PostgrestClient(new URL('rest/v1', baseUrl).href, {
            headers: this.headers,
            schema: settings.db.schema,
            fetch: this.fetch,
        });
        this.storage = new StorageClient(this.storageUrl.href, this.headers, this.fetch, options === null || options === void 0 ? void 0 : options.storage);
        if (!settings.accessToken) {
            this._listenForAuthEvents();
        }
    }
    /**
     * Supabase Functions allows you to deploy and invoke edge functions.
     */
    get functions() {
        // Constructed lazily per access so it always sees current headers/fetch.
        return new FunctionsClient(this.functionsUrl.href, {
            headers: this.headers,
            customFetch: this.fetch,
        });
    }
    /**
     * Perform a query on a table or a view.
     *
     * @param relation - The table or view name to query
     */
    from(relation) {
        return this.rest.from(relation);
    }
    // NOTE: signatures must be kept in sync with PostgrestClient.schema
    /**
     * Select a schema to query or perform a function (rpc) call.
     *
     * The schema needs to be on the list of exposed schemas inside Supabase.
     *
     * @param schema - The schema to query
     */
    schema(schema) {
        return this.rest.schema(schema);
    }
    // NOTE: signatures must be kept in sync with PostgrestClient.rpc
    /**
     * Perform a function call.
     *
     * @param fn - The function name to call
     * @param args - The arguments to pass to the function call
     * @param options - Named parameters
     * @param options.head - When set to `true`, `data` will not be returned.
     * Useful if you only need the count.
     * @param options.get - When set to `true`, the function will be called with
     * read-only access mode.
     * @param options.count - Count algorithm to use to count rows returned by the
     * function. Only applicable for [set-returning
     * functions](https://www.postgresql.org/docs/current/functions-srf.html).
     *
     * `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
     * hood.
     *
     * `"planned"`: Approximated but fast count algorithm. Uses the Postgres
     * statistics under the hood.
     *
     * `"estimated"`: Uses exact count for low numbers and planned count for high
     * numbers.
     */
    rpc(fn, args = {}, options = {}) {
        return this.rest.rpc(fn, args, options);
    }
    /**
     * Creates a Realtime channel with Broadcast, Presence, and Postgres Changes.
     *
     * @param {string} name - The name of the Realtime channel.
     * @param {Object} opts - The options to pass to the Realtime channel.
     *
     */
    channel(name, opts = { config: {} }) {
        return this.realtime.channel(name, opts);
    }
    /**
     * Returns all Realtime channels.
     */
    getChannels() {
        return this.realtime.getChannels();
    }
    /**
     * Unsubscribes and removes Realtime channel from Realtime client.
     *
     * @param {RealtimeChannel} channel - The name of the Realtime channel.
     *
     */
    removeChannel(channel) {
        return this.realtime.removeChannel(channel);
    }
    /**
     * Unsubscribes and removes all Realtime channels from Realtime client.
     */
    removeAllChannels() {
        return this.realtime.removeAllChannels();
    }
    // Resolves the token used for outgoing requests: custom provider first,
    // then the current session's access token, finally the supabase key.
    _getAccessToken() {
        var _a, _b;
        return __awaiter(this, void 0, void 0, function* () {
            if (this.accessToken) {
                return yield this.accessToken();
            }
            const { data } = yield this.auth.getSession();
            return (_b = (_a = data.session) === null || _a === void 0 ? void 0 : _a.access_token) !== null && _b !== void 0 ? _b : this.supabaseKey;
        });
    }
    // Builds the SupabaseAuthClient, layering caller headers over the
    // Authorization/apikey pair derived from the supabase key.
    _initSupabaseAuthClient({ autoRefreshToken, persistSession, detectSessionInUrl, storage, storageKey, flowType, lock, debug, }, headers, fetch) {
        const authHeaders = {
            Authorization: `Bearer ${this.supabaseKey}`,
            apikey: `${this.supabaseKey}`,
        };
        return new SupabaseAuthClient({
            url: this.authUrl.href,
            headers: Object.assign(Object.assign({}, authHeaders), headers),
            storageKey: storageKey,
            autoRefreshToken,
            persistSession,
            detectSessionInUrl,
            storage,
            flowType,
            lock,
            debug,
            fetch,
            // auth checks if there is a custom authorization header using this flag
            // so it knows whether to return an error when getUser is called with no session
            hasCustomAuthorizationHeader: 'Authorization' in this.headers,
        });
    }
    // Builds the RealtimeClient with the apikey merged into its params.
    _initRealtimeClient(options) {
        return new RealtimeClient(this.realtimeUrl.href, Object.assign(Object.assign({}, options), { params: Object.assign({ apikey: this.supabaseKey }, options === null || options === void 0 ? void 0 : options.params) }));
    }
    // Mirrors auth state changes into this client (see _handleTokenChanged).
    _listenForAuthEvents() {
        let data = this.auth.onAuthStateChange((event, session) => {
            this._handleTokenChanged(event, 'CLIENT', session === null || session === void 0 ? void 0 : session.access_token);
        });
        return data;
    }
    // Tracks the last seen access token; on sign-out, clears realtime auth
    // and (for storage-originated events) signs out the auth client too.
    _handleTokenChanged(event, source, token) {
        if ((event === 'TOKEN_REFRESHED' || event === 'SIGNED_IN') &&
            this.changedAccessToken !== token) {
            this.changedAccessToken = token;
        }
        else if (event === 'SIGNED_OUT') {
            this.realtime.setAuth();
            if (source == 'STORAGE')
                this.auth.signOut();
            this.changedAccessToken = undefined;
        }
    }
}
|
|
|
|
/**
|
|
* Creates a new Supabase Client.
|
|
*/
|
|
const createClient = (supabaseUrl, supabaseKey, options) =>
    // Thin factory wrapper around the SupabaseClient constructor.
    new SupabaseClient(supabaseUrl, supabaseKey, options);
|
|
// Check for Node.js <= 18 deprecation
|
|
/**
 * Decides whether to emit the Node.js <= 18 deprecation warning.
 * Returns false in browsers, in runtimes without `process`, and whenever the
 * Node major version cannot be determined or is 19+.
 */
function shouldShowDeprecationWarning() {
    // Browser bundles never warn.
    if (typeof window !== 'undefined') {
        return false;
    }
    // Runtimes without `process` (e.g. Edge Runtime) never warn.
    if (typeof process === 'undefined') {
        return false;
    }
    // Dynamic property access avoids Next.js Edge Runtime static-analysis warnings.
    const nodeVersion = process['version'];
    if (nodeVersion == null) {
        return false;
    }
    const match = nodeVersion.match(/^v(\d+)\./);
    if (match === null) {
        return false;
    }
    return parseInt(match[1], 10) <= 18;
}
|
|
// One-time startup warning on deprecated Node.js runtimes (<= 18).
if (shouldShowDeprecationWarning()) {
    console.warn(`⚠️ Node.js 18 and below are deprecated and will no longer be supported in future versions of @supabase/supabase-js. ` +
        `Please upgrade to Node.js 20 or later. ` +
        `For more information, visit: https://github.com/orgs/supabase/discussions/37217`);
}
|
|
|
|
// Vanilla store factory (zustand core): one state value plus a set of change
// listeners behind a tiny setState/getState/getInitialState/subscribe API.
const createStoreImpl = (initializer) => {
    let currentState;
    let initialSnapshot;
    const subscribers = /* @__PURE__ */ new Set();
    const setState = (partial, replace) => {
        // A function partial receives the current state; anything else is used as-is.
        const candidate = typeof partial === "function" ? partial(currentState) : partial;
        if (Object.is(candidate, currentState)) {
            return; // identical value: no update, no notifications
        }
        const prior = currentState;
        // Replace wholesale when asked to (or when the next value is not a
        // mergeable object); otherwise shallow-merge into a fresh object.
        const shouldReplace = replace != null ? replace : typeof candidate !== "object" || candidate === null;
        currentState = shouldReplace ? candidate : Object.assign({}, currentState, candidate);
        subscribers.forEach((fn) => fn(currentState, prior));
    };
    const getState = () => currentState;
    const getInitialState = () => initialSnapshot;
    const subscribe = (fn) => {
        subscribers.add(fn);
        // Returns the unsubscribe handle.
        return () => subscribers.delete(fn);
    };
    const api = { setState, getState, getInitialState, subscribe };
    // The initializer receives the api so state methods can close over it.
    initialSnapshot = currentState = initializer(setState, getState, api);
    return api;
};

// Curried entry point: `createStore(fn)` or `createStore()(fn)`.
const createStore = ((initializer) => initializer ? createStoreImpl(initializer) : createStoreImpl);
|
|
|
|
// Default selector: returns the store state unchanged.
const identity = (arg) => arg;
/**
 * React hook that binds a vanilla store `api` to a component via
 * useSyncExternalStore; the component re-renders when `selector(state)`
 * changes. The third argument supplies the server/initial snapshot.
 */
function useStore(api, selector = identity) {
  const slice = React.useSyncExternalStore(
    api.subscribe,
    // Memoized snapshot getters keep useSyncExternalStore stable across renders.
    React.useCallback(() => selector(api.getState()), [api, selector]),
    React.useCallback(() => selector(api.getInitialState()), [api, selector])
  );
  React.useDebugValue(slice);
  return slice;
}
|
|
// Builds the user-facing bound hook: a hook function that also carries the
// vanilla store API (setState/getState/getInitialState/subscribe) as own
// properties.
const createImpl = (stateInitializer) => {
    const storeApi = createStore(stateInitializer);
    const useBoundStore = (selector) => useStore(storeApi, selector);
    Object.assign(useBoundStore, storeApi);
    return useBoundStore;
};

// Curried like createStore: usable as `create(fn)` or `create()(fn)`.
const create = ((stateInitializer) => stateInitializer ? createImpl(stateInitializer) : createImpl);
|
|
|
|
/**
 * Returns a wrapper that invokes `fn` with `this` bound to `thisArg`,
 * forwarding all call-time arguments.
 */
function bind(fn, thisArg) {
    return function wrap(...args) {
        return fn.apply(thisArg, args);
    };
}
|
|
|
|
// utils is a library of generic helper functions non-specific to axios
|
|
|
|
const {toString} = Object.prototype;
const {getPrototypeOf} = Object;
const {iterator, toStringTag} = Symbol;

/**
 * Returns the lowercase internal [[Class]] name of a value (e.g. 'array',
 * 'date'), memoizing tag strings in a prototype-less cache.
 */
const kindOf = ((cache) => (thing) => {
    const tag = toString.call(thing);
    let kind = cache[tag];
    if (!kind) {
        kind = tag.slice(8, -1).toLowerCase();
        cache[tag] = kind;
    }
    return kind;
})(Object.create(null));

/**
 * Builds a predicate matching values whose kindOf equals `type`
 * (case-insensitive).
 */
const kindOfTest = (type) => {
    const wanted = type.toLowerCase();
    return (thing) => kindOf(thing) === wanted;
};

/**
 * Builds a predicate testing `typeof thing === type`.
 */
const typeOfTest = (type) => (thing) => typeof thing === type;
|
|
|
|
/**
 * Determine if a value is an Array.
 *
 * @param {*} val The value to test
 * @returns {boolean} True if value is an Array, otherwise false
 */
const isArray = Array.isArray;

/**
 * Determine if a value is undefined.
 *
 * @param {*} val The value to test
 * @returns {boolean} True if the value is undefined, otherwise false
 */
const isUndefined = (thing) => typeof thing === 'undefined';
|
|
|
|
/**
 * Determine if a value is a Buffer without referencing Node's Buffer class:
 * the value's constructor must expose an `isBuffer` that vouches for it.
 *
 * @param {*} val The value to test
 * @returns {boolean} True if value is a Buffer, otherwise false
 */
function isBuffer(val) {
    if (val === null || isUndefined(val)) {
        return false;
    }
    const ctor = val.constructor;
    return ctor !== null && !isUndefined(ctor) && isFunction(ctor.isBuffer) && ctor.isBuffer(val);
}
|
|
|
|
/**
 * Determine if a value is an ArrayBuffer.
 *
 * @param {*} val The value to test
 * @returns {boolean} True if value is an ArrayBuffer, otherwise false
 */
const isArrayBuffer = kindOfTest('ArrayBuffer');

/**
 * Determine if a value is a view on an ArrayBuffer (TypedArray / DataView).
 * Uses ArrayBuffer.isView when available, with a duck-typed fallback.
 *
 * @param {*} val The value to test
 * @returns {boolean} True if value is a view on an ArrayBuffer, otherwise false
 */
function isArrayBufferView(val) {
    if (typeof ArrayBuffer !== 'undefined' && ArrayBuffer.isView) {
        return ArrayBuffer.isView(val);
    }
    // Fallback mirrors the original truthiness chain exactly.
    return (val) && (val.buffer) && (isArrayBuffer(val.buffer));
}
|
|
|
|
/**
 * Determine if a value is a String.
 *
 * @param {*} val The value to test
 * @returns {boolean} True if value is a String, otherwise false
 */
const isString = (thing) => typeof thing === 'string';

/**
 * Determine if a value is a Function.
 *
 * @param {*} val The value to test
 * @returns {boolean} True if value is a Function, otherwise false
 */
const isFunction = (thing) => typeof thing === 'function';

/**
 * Determine if a value is a Number (typeof-based; NaN counts as a number).
 *
 * @param {*} val The value to test
 * @returns {boolean} True if value is a Number, otherwise false
 */
const isNumber = (thing) => typeof thing === 'number';

/**
 * Determine if a value is an Object — any non-null value whose typeof is
 * 'object' (arrays and class instances included, functions excluded).
 *
 * @param {*} thing The value to test
 * @returns {boolean} True if value is an Object, otherwise false
 */
const isObject = (thing) => thing !== null && typeof thing === 'object';
|
|
|
|
/**
 * Determine if a value is a Boolean (only the literal true/false).
 *
 * @param {*} thing The value to test
 * @returns {boolean} True if value is a Boolean, otherwise false
 */
const isBoolean = (thing) => thing === true || thing === false;

/**
 * Determine if a value is a plain Object: tag 'object', a prototype that is
 * null, Object.prototype, or one step above, and no Symbol.toStringTag or
 * Symbol.iterator property.
 *
 * @param {*} val The value to test
 * @returns {boolean} True if value is a plain Object, otherwise false
 */
const isPlainObject = (val) => {
    if (kindOf(val) !== 'object') {
        return false;
    }
    const proto = getPrototypeOf(val);
    if (proto !== null && proto !== Object.prototype && Object.getPrototypeOf(proto) !== null) {
        return false;
    }
    return !(toStringTag in val) && !(iterator in val);
};
|
|
|
|
/**
 * Determine if a value is an empty object (safely handles Buffers).
 *
 * @param {*} val The value to test
 * @returns {boolean} True if value is an empty object, otherwise false
 */
const isEmptyObject = (val) => {
    // Buffers (and non-objects) are rejected up front: Object.keys on a
    // large Buffer can throw RangeError.
    if (!isObject(val) || isBuffer(val)) {
        return false;
    }
    try {
        return Object.getPrototypeOf(val) === Object.prototype && Object.keys(val).length === 0;
    }
    catch (e) {
        // Any exotic object that still breaks Object.keys counts as non-empty.
        return false;
    }
};
|
|
|
|
/**
 * Internal-tag predicates for common platform classes:
 * isDate, isFile, isBlob, isFileList — each true when the value's
 * [[Class]] tag matches the corresponding name.
 */
const [isDate, isFile, isBlob, isFileList] = ['Date', 'File', 'Blob', 'FileList'].map(kindOfTest);
|
|
|
|
/**
 * Determine if a value is a Stream: any object exposing a callable `pipe`.
 *
 * @param {*} val The value to test
 * @returns {boolean} True if value is a Stream, otherwise false
 */
const isStream = (val) => isObject(val) && isFunction(val.pipe);

/**
 * Determine if a value is a FormData: either a native instance or a
 * duck-typed polyfill (e.g. the form-data package) exposing `append` with a
 * matching kind tag or toString output.
 *
 * @param {*} thing The value to test
 * @returns {boolean} True if value is a FormData, otherwise false
 */
const isFormData = (thing) => {
    if (!thing) {
        // Preserve the original short-circuit: falsy inputs are returned as-is.
        return thing;
    }
    if (typeof FormData === 'function' && thing instanceof FormData) {
        return true;
    }
    if (!isFunction(thing.append)) {
        return false;
    }
    const kind = kindOf(thing);
    // detect form-data instance
    return kind === 'formdata' ||
        (kind === 'object' && isFunction(thing.toString) && thing.toString() === '[object FormData]');
};

/**
 * Determine if a value is a URLSearchParams object.
 *
 * @param {*} val The value to test
 * @returns {boolean} True if value is a URLSearchParams object, otherwise false
 */
const isURLSearchParams = kindOfTest('URLSearchParams');

// Tag-based predicates for the Fetch API family.
const [isReadableStream, isRequest, isResponse, isHeaders] = ['ReadableStream', 'Request', 'Response', 'Headers'].map(kindOfTest);
|
|
|
|
/**
 * Trim excess whitespace off the beginning and end of a string.
 *
 * @param {String} str The String to trim
 * @returns {String} The String freed of excess whitespace
 */
const trim = (str) => {
    // Prefer native trim; regex fallback covers string-likes without one.
    if (str.trim) {
        return str.trim();
    }
    return str.replace(/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g, '');
};
|
|
|
|
/**
 * Iterate over an Array or an Object invoking a function for each item.
 *
 * Arrays invoke `fn` with (value, index, array); objects with
 * (value, key, object). Primitives are wrapped in a one-element array;
 * null/undefined and Buffers are skipped entirely.
 *
 * @param {Object|Array} obj The object to iterate
 * @param {Function} fn The callback to invoke for each item
 * @param {Boolean} [allOwnKeys = false] Visit non-enumerable own keys too
 * @returns {any}
 */
function forEach(obj, fn, {allOwnKeys = false} = {}) {
    // Don't bother if no value provided
    if (obj === null || typeof obj === 'undefined') {
        return;
    }
    // Force an array if not already something iterable
    if (typeof obj !== 'object') {
        /*eslint no-param-reassign:0*/
        obj = [obj];
    }
    if (isArray(obj)) {
        // Iterate over array values
        for (let i = 0, l = obj.length; i < l; i++) {
            fn.call(null, obj[i], i, obj);
        }
        return;
    }
    // Buffers are deliberately not iterated
    if (isBuffer(obj)) {
        return;
    }
    // Iterate over object keys
    const keys = allOwnKeys ? Object.getOwnPropertyNames(obj) : Object.keys(obj);
    for (let i = 0, len = keys.length; i < len; i++) {
        const key = keys[i];
        fn.call(null, obj[key], key, obj);
    }
}
|
|
|
|
/**
 * Case-insensitive own-key lookup: returns the actual key of `obj` whose
 * lowercase form equals `key`, or null when absent (Buffers always null).
 */
function findKey(obj, key) {
    if (isBuffer(obj)) {
        return null;
    }
    const wanted = key.toLowerCase();
    const names = Object.keys(obj);
    // Scan from the end, preserving the original reverse match order.
    for (let i = names.length - 1; i >= 0; i--) {
        const candidate = names[i];
        if (candidate.toLowerCase() === wanted) {
            return candidate;
        }
    }
    return null;
}
|
|
|
|
// Resolve the global object across environments, preferring globalThis.
const _global = (() => {
    /*eslint no-undef:0*/
    if (typeof globalThis !== "undefined") return globalThis;
    if (typeof self !== "undefined") return self;
    return typeof window !== 'undefined' ? window : global;
})();

// True when `context` is a defined value other than the global object.
const isContextDefined = (context) => typeof context !== 'undefined' && context !== _global;
|
|
|
|
/**
 * Accepts varargs expecting each argument to be an object, then
 * immutably merges the properties of each object and returns result.
 *
 * When multiple objects contain the same key the later object in
 * the arguments list will take precedence. Plain objects are merged
 * recursively, arrays are shallow-copied, everything else is assigned.
 *
 * When invoked with a bound `this` of `{caseless: true}`, keys are matched
 * case-insensitively against keys already in the result. NOTE: the recursive
 * calls below are plain calls (no `this`), so nested merges are never
 * caseless — only the top level honors the flag.
 *
 * Example:
 *
 * ```js
 * var result = merge({foo: 123}, {foo: 456});
 * console.log(result.foo); // outputs 456
 * ```
 *
 * @param {Object} obj1 Object to merge
 * @returns {Object} Result of all merge properties
 */
function merge(/* obj1, obj2, obj3, ... */) {
  // `caseless` is read from a bound context, if any (see isContextDefined).
  const {caseless} = isContextDefined(this) && this || {};
  const result = {};
  const assignValue = (val, key) => {
    // Under caseless mode, reuse an existing key differing only by case.
    const targetKey = caseless && findKey(result, key) || key;
    if (isPlainObject(result[targetKey]) && isPlainObject(val)) {
      result[targetKey] = merge(result[targetKey], val);
    } else if (isPlainObject(val)) {
      result[targetKey] = merge({}, val);
    } else if (isArray(val)) {
      result[targetKey] = val.slice();
    } else {
      result[targetKey] = val;
    }
  };
  for (let i = 0, l = arguments.length; i < l; i++) {
    arguments[i] && forEach(arguments[i], assignValue);
  }
  return result;
}
|
|
|
|
/**
 * Extends object a by mutably adding to it the properties of object b.
 * Function values are bound to `thisArg` when one is provided.
 *
 * @param {Object} a The object to be extended
 * @param {Object} b The object to copy properties from
 * @param {Object} thisArg The object to bind function to
 * @param {Boolean} [allOwnKeys] Copy non-enumerable own keys of `b` too
 * @returns {Object} The resulting value of object a
 */
const extend = (a, b, thisArg, {allOwnKeys} = {}) => {
    forEach(b, (val, key) => {
        a[key] = thisArg && isFunction(val) ? bind(val, thisArg) : val;
    }, {allOwnKeys});
    return a;
};
|
|
|
|
/**
 * Remove a leading byte order mark (U+FEFF, e.g. the UTF-8 BOM EF BB BF).
 *
 * @param {string} content content with BOM
 * @returns {string} content value without BOM
 */
const stripBOM = (content) => content.charCodeAt(0) === 0xFEFF ? content.slice(1) : content;
|
|
|
|
/**
 * Inherit the prototype methods from one constructor into another.
 * Also mixes `props` into the new prototype, applies property `descriptors`,
 * and exposes the parent prototype as `constructor.super`.
 *
 * @param {function} constructor
 * @param {function} superConstructor
 * @param {object} [props]
 * @param {object} [descriptors]
 * @returns {void}
 */
const inherits = (constructor, superConstructor, props, descriptors) => {
    const proto = Object.create(superConstructor.prototype, descriptors);
    constructor.prototype = proto;
    proto.constructor = constructor;
    Object.defineProperty(constructor, 'super', {
        value: superConstructor.prototype
    });
    if (props) {
        Object.assign(constructor.prototype, props);
    }
};
|
|
|
|
/**
 * Resolve object with deep prototype chain to a flat object: own property
 * names at each level are copied into `destObj` (nearest definition wins),
 * walking up until `filter` rejects a prototype or Object.prototype is hit.
 *
 * @param {Object} sourceObj source object
 * @param {Object} [destObj] target object (created when omitted)
 * @param {Function|Boolean} [filter] per-prototype gate; `false` copies the
 *   own level only
 * @param {Function} [propFilter] per-property gate (prop, source, dest)
 * @returns {Object} destObj
 */
const toFlatObject = (sourceObj, destObj, filter, propFilter) => {
    const seen = {};
    destObj = destObj || {};
    // eslint-disable-next-line no-eq-null,eqeqeq
    if (sourceObj == null) return destObj;
    do {
        // Reverse iteration preserves the original property insertion order.
        const names = Object.getOwnPropertyNames(sourceObj);
        let i = names.length;
        while (i-- > 0) {
            const prop = names[i];
            if ((!propFilter || propFilter(prop, sourceObj, destObj)) && !seen[prop]) {
                destObj[prop] = sourceObj[prop];
                seen[prop] = true;
            }
        }
        sourceObj = filter !== false && getPrototypeOf(sourceObj);
    } while (sourceObj && (!filter || filter(sourceObj, destObj)) && sourceObj !== Object.prototype);
    return destObj;
};
|
|
|
|
/**
|
|
* Determines whether a string ends with the characters of a specified string
|
|
*
|
|
* @param {String} str
|
|
* @param {String} searchString
|
|
* @param {Number} [position= 0]
|
|
*
|
|
* @returns {boolean}
|
|
*/
|
|
const endsWith = (str, searchString, position) => {
  // Polyfill-style String#endsWith with an optional end position.
  str = String(str);
  if (position === undefined || position > str.length) {
    position = str.length;
  }
  const expectedIndex = position - searchString.length;
  const foundIndex = str.indexOf(searchString, expectedIndex);
  return foundIndex !== -1 && foundIndex === expectedIndex;
};
|
|
|
|
|
|
/**
|
|
* Returns new array from array like object or null if failed
|
|
*
|
|
* @param {*} [thing]
|
|
*
|
|
* @returns {?Array}
|
|
*/
|
|
const toArray = (thing) => {
  // Copy an array-like into a real Array; null for non-array-likes.
  if (!thing) return null;
  if (isArray(thing)) return thing;
  const len = thing.length;
  if (!isNumber(len)) return null;
  const result = new Array(len);
  for (let index = 0; index < len; index += 1) {
    result[index] = thing[index];
  }
  return result;
};
|
|
|
|
/**
|
|
* Checking if the Uint8Array exists and if it does, it returns a function that checks if the
|
|
* thing passed in is an instance of Uint8Array
|
|
*
|
|
* @param {TypedArray}
|
|
*
|
|
* @returns {Array}
|
|
*/
|
|
// eslint-disable-next-line func-names
const isTypedArray = (TypedArray => {
  // eslint-disable-next-line func-names
  return thing => {
    // `TypedArray` is captured below as getPrototypeOf(Uint8Array) — the
    // shared %TypedArray% intrinsic — so one instanceof covers every
    // typed-array flavor (Int8Array, Float64Array, ...).
    return TypedArray && thing instanceof TypedArray;
  };
})(typeof Uint8Array !== 'undefined' && getPrototypeOf(Uint8Array));
|
|
|
|
/**
|
|
* For each entry in the object, call the function with the key and value.
|
|
*
|
|
* @param {Object<any, any>} obj - The object to iterate over.
|
|
* @param {Function} fn - The function to call for each entry.
|
|
*
|
|
* @returns {void}
|
|
*/
|
|
const forEachEntry = (obj, fn) => {
  // NOTE(review): assumes `obj` is iterable — if Symbol.iterator is absent,
  // `generator.call` below throws.
  const generator = obj && obj[iterator];

  const _iterator = generator.call(obj);

  let result;

  while ((result = _iterator.next()) && !result.done) {
    // Each yielded value is expected to be a [key, value] pair
    // (e.g. FormData#entries()).
    const pair = result.value;
    fn.call(obj, pair[0], pair[1]);
  }
};
|
|
|
|
/**
|
|
* It takes a regular expression and a string, and returns an array of all the matches
|
|
*
|
|
* @param {string} regExp - The regular expression to match against.
|
|
* @param {string} str - The string to search.
|
|
*
|
|
* @returns {Array<boolean>}
|
|
*/
|
|
const matchAll = (regExp, str) => {
  // Collect every match object produced by repeatedly exec-ing a /g regex.
  // (Relies on the regex's stateful lastIndex advancing between calls.)
  const results = [];
  for (let found = regExp.exec(str); found !== null; found = regExp.exec(str)) {
    results.push(found);
  }
  return results;
};
|
|
|
|
/* Checking if the kindOfTest function returns true when passed an HTMLFormElement. */
// True for values whose internal class tag is 'HTMLFormElement'.
const isHTMLForm = kindOfTest('HTMLFormElement');
|
|
|
|
const toCamelCase = str => {
  // Lowercase the whole string, then upper-case the first letter/digit
  // following each '-', '_' or whitespace separator.
  const separatorRE = /[-_\s]([a-z\d])(\w*)/g;
  return str.toLowerCase().replace(separatorRE, (match, first, rest) => {
    return first.toUpperCase() + rest;
  });
};
|
|
|
|
/* Creating a function that will check if an object has a property. */
// Uses the cached Object.prototype.hasOwnProperty, so it works even on
// objects that shadow hasOwnProperty or were created with a null prototype.
const hasOwnProperty = (({hasOwnProperty}) => (obj, prop) => hasOwnProperty.call(obj, prop))(Object.prototype);

/**
 * Determine if a value is a RegExp object
 *
 * @param {*} val The value to test
 *
 * @returns {boolean} True if value is a RegExp object, otherwise false
 */
const isRegExp = kindOfTest('RegExp');
|
|
|
|
// Rewrite an object's own property descriptors through `reducer`.
// reducer(descriptor, name, obj) may return: false (leave the property
// untouched), a replacement descriptor, or any other falsy value (re-apply
// the — possibly mutated — original descriptor).
const reduceDescriptors = (obj, reducer) => {
  const descriptors = Object.getOwnPropertyDescriptors(obj);
  const reducedDescriptors = {};

  forEach(descriptors, (descriptor, name) => {
    let ret;
    if ((ret = reducer(descriptor, name, obj)) !== false) {
      reducedDescriptors[name] = ret || descriptor;
    }
  });

  Object.defineProperties(obj, reducedDescriptors);
};
|
|
|
|
/**
|
|
* Makes all methods read-only
|
|
* @param {Object} obj
|
|
*/
|
|
|
|
const freezeMethods = (obj) => {
  reduceDescriptors(obj, (descriptor, name) => {
    // skip restricted props in strict mode
    if (isFunction(obj) && ['arguments', 'caller', 'callee'].indexOf(name) !== -1) {
      return false;
    }

    const value = obj[name];

    // Only function-valued properties are locked down; data props pass through.
    if (!isFunction(value)) return;

    descriptor.enumerable = false;

    // Data property: simply make it non-writable.
    if ('writable' in descriptor) {
      descriptor.writable = false;
      return;
    }

    // Accessor property without a setter: install a throwing setter so
    // assignment fails loudly instead of silently in sloppy mode.
    if (!descriptor.set) {
      descriptor.set = () => {
        throw Error('Can not rewrite read-only method \'' + name + '\'');
      };
    }
  });
};
|
|
|
|
// Build a lookup object ({item: true, ...}) from an array, or from a string
// split on `delimiter`.
const toObjectSet = (arrayOrString, delimiter) => {
  const entries = isArray(arrayOrString) ?
    arrayOrString :
    String(arrayOrString).split(delimiter);

  const set = {};
  for (const entry of entries) {
    set[entry] = true;
  }

  return set;
};
|
|
|
|
// Shared no-op placeholder callback.
const noop = () => {};

// Coerce `value` to a number, falling back to `defaultValue` when the input
// is nullish or does not coerce to a finite number.
const toFiniteNumber = (value, defaultValue) => {
  if (value == null) {
    return defaultValue;
  }
  const coerced = +value;
  return Number.isFinite(coerced) ? coerced : defaultValue;
};
|
|
|
|
/**
|
|
* If the thing is a FormData object, return true, otherwise return false.
|
|
*
|
|
* @param {unknown} thing - The thing to check.
|
|
*
|
|
* @returns {boolean}
|
|
*/
|
|
function isSpecCompliantForm(thing) {
  // Spec-compliant FormData exposes append(), carries the 'FormData'
  // toStringTag, and is iterable — this distinguishes native/polyfilled
  // FormData from minimal look-alikes (e.g. AxiosURLSearchParams).
  return !!(thing && isFunction(thing.append) && thing[toStringTag] === 'FormData' && thing[iterator]);
}
|
|
|
|
// Convert an object graph to a plain JSON-safe structure, breaking circular
// references and leaving objects that define their own toJSON untouched.
const toJSONObject = (obj) => {
  // Visited-ancestor slots indexed by depth; depth is capped at 10, deeper
  // levels are not cycle-checked.
  const stack = new Array(10);

  const visit = (source, i) => {

    if (isObject(source)) {
      // Already an ancestor of the current path — circular; drop it.
      if (stack.indexOf(source) >= 0) {
        return;
      }

      //Buffer check
      if (isBuffer(source)) {
        return source;
      }

      // Objects with a custom toJSON are returned as-is (JSON.stringify
      // will invoke it later).
      if(!('toJSON' in source)) {
        stack[i] = source;
        const target = isArray(source) ? [] : {};

        forEach(source, (value, key) => {
          const reducedValue = visit(value, i + 1);
          // undefined results (cycles / skipped values) are omitted.
          !isUndefined(reducedValue) && (target[key] = reducedValue);
        });

        stack[i] = undefined;

        return target;
      }
    }

    return source;
  };

  return visit(obj, 0);
};
|
|
|
|
// Detects native async functions (class tag 'AsyncFunction').
const isAsyncFn = kindOfTest('AsyncFunction');

// Promise-like: an object/function with callable then() AND catch().
const isThenable = (thing) =>
  thing && (isObject(thing) || isFunction(thing)) && isFunction(thing.then) && isFunction(thing.catch);

// original code
// https://github.com/DigitalBrainJS/AxiosPromise/blob/16deab13710ec09779922131f3fa5954320f83ab/lib/utils.js#L11-L34

// Macrotask scheduler: native setImmediate where available, otherwise a
// self-postMessage trampoline (classic zero-delay trick), otherwise setTimeout.
const _setImmediate = ((setImmediateSupported, postMessageSupported) => {
  if (setImmediateSupported) {
    return setImmediate;
  }

  return postMessageSupported ? ((token, callbacks) => {
    _global.addEventListener("message", ({source, data}) => {
      // Only react to our own random token posted from this same global —
      // ignores unrelated postMessage traffic.
      if (source === _global && data === token) {
        callbacks.length && callbacks.shift()();
      }
    }, false);

    return (cb) => {
      callbacks.push(cb);
      _global.postMessage(token, "*");
    }
  })(`axios@${Math.random()}`, []) : (cb) => setTimeout(cb);
})(
  typeof setImmediate === 'function',
  isFunction(_global.postMessage)
);

// "As soon as possible" scheduler: queueMicrotask, then process.nextTick
// (Node), then the _setImmediate macrotask fallback.
const asap = typeof queueMicrotask !== 'undefined' ?
  queueMicrotask.bind(_global) : ( typeof process !== 'undefined' && process.nextTick || _setImmediate);

// *********************

// Non-null value exposing a callable Symbol.iterator.
const isIterable = (thing) => thing != null && isFunction(thing[iterator]);
|
|
|
|
|
|
// Aggregated utility namespace consumed by the rest of the axios bundle
// (type predicates, iteration helpers, object tools and schedulers).
const utils$1 = {
  isArray,
  isArrayBuffer,
  isBuffer,
  isFormData,
  isArrayBufferView,
  isString,
  isNumber,
  isBoolean,
  isObject,
  isPlainObject,
  isEmptyObject,
  isReadableStream,
  isRequest,
  isResponse,
  isHeaders,
  isUndefined,
  isDate,
  isFile,
  isBlob,
  isRegExp,
  isFunction,
  isStream,
  isURLSearchParams,
  isTypedArray,
  isFileList,
  forEach,
  merge,
  extend,
  trim,
  stripBOM,
  inherits,
  toFlatObject,
  kindOf,
  kindOfTest,
  endsWith,
  toArray,
  forEachEntry,
  matchAll,
  isHTMLForm,
  hasOwnProperty,
  hasOwnProp: hasOwnProperty, // an alias to avoid ESLint no-prototype-builtins detection
  reduceDescriptors,
  freezeMethods,
  toObjectSet,
  toCamelCase,
  noop,
  toFiniteNumber,
  findKey,
  global: _global,
  isContextDefined,
  isSpecCompliantForm,
  toJSONObject,
  isAsyncFn,
  isThenable,
  setImmediate: _setImmediate,
  asap,
  isIterable
};
|
|
|
|
/**
|
|
* Create an Error with the specified message, config, error code, request and response.
|
|
*
|
|
* @param {string} message The error message.
|
|
* @param {string} [code] The error code (for example, 'ECONNABORTED').
|
|
* @param {Object} [config] The config.
|
|
* @param {Object} [request] The request.
|
|
* @param {Object} [response] The response.
|
|
*
|
|
* @returns {Error} The created error.
|
|
*/
|
|
/**
 * Create an Error with the specified message, config, error code, request and response.
 *
 * @param {string} message The error message.
 * @param {string} [code] The error code (for example, 'ECONNABORTED').
 * @param {Object} [config] The config.
 * @param {Object} [request] The request.
 * @param {Object} [response] The response.
 *
 * @returns {Error} The created error.
 */
function AxiosError$1(message, code, config, request, response) {
  Error.call(this);

  // Prefer V8's captureStackTrace (trims this constructor from the trace);
  // fall back to harvesting a fresh Error's stack elsewhere.
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this, this.constructor);
  } else {
    this.stack = (new Error()).stack;
  }

  this.message = message;
  this.name = 'AxiosError';
  if (code) {
    this.code = code;
  }
  if (config) {
    this.config = config;
  }
  if (request) {
    this.request = request;
  }
  if (response) {
    this.response = response;
    // A falsy status (e.g. 0) is normalized to null.
    this.status = response.status ? response.status : null;
  }
}
|
|
|
|
// Chain AxiosError onto Error and give it a JSON-serializable shape that
// includes vendor-specific error fields when present.
utils$1.inherits(AxiosError$1, Error, {
  toJSON: function toJSON() {
    return {
      // Standard
      message: this.message,
      name: this.name,
      // Microsoft
      description: this.description,
      number: this.number,
      // Mozilla
      fileName: this.fileName,
      lineNumber: this.lineNumber,
      columnNumber: this.columnNumber,
      stack: this.stack,
      // Axios
      // The config is deep-copied with cycles broken so JSON.stringify is safe.
      config: utils$1.toJSONObject(this.config),
      code: this.code,
      status: this.status
    };
  }
});
|
|
|
|
const prototype$1 = AxiosError$1.prototype;
const descriptors = {};

// Well-known axios error codes, exposed as static constants
// (e.g. AxiosError.ERR_NETWORK === 'ERR_NETWORK').
[
  'ERR_BAD_OPTION_VALUE',
  'ERR_BAD_OPTION',
  'ECONNABORTED',
  'ETIMEDOUT',
  'ERR_NETWORK',
  'ERR_FR_TOO_MANY_REDIRECTS',
  'ERR_DEPRECATED',
  'ERR_BAD_RESPONSE',
  'ERR_BAD_REQUEST',
  'ERR_CANCELED',
  'ERR_NOT_SUPPORT',
  'ERR_INVALID_URL'
// eslint-disable-next-line func-names
].forEach(code => {
  descriptors[code] = {value: code};
});

Object.defineProperties(AxiosError$1, descriptors);
// Non-enumerable marker used by axios.isAxiosError().
Object.defineProperty(prototype$1, 'isAxiosError', {value: true});

// Wrap an arbitrary error in an AxiosError, preserving its own properties.
// eslint-disable-next-line func-names
AxiosError$1.from = (error, code, config, request, response, customProps) => {
  const axiosError = Object.create(prototype$1);

  // Copy own props from the original error (walking its prototype chain but
  // stopping before Error.prototype), skipping the isAxiosError marker.
  utils$1.toFlatObject(error, axiosError, function filter(obj) {
    return obj !== Error.prototype;
  }, prop => {
    return prop !== 'isAxiosError';
  });

  AxiosError$1.call(axiosError, error.message, code, config, request, response);

  axiosError.cause = error;

  axiosError.name = error.name;

  customProps && Object.assign(axiosError, customProps);

  return axiosError;
};

// Browser bundle: the Node http adapter is stubbed out.
// eslint-disable-next-line strict
const httpAdapter = null;
|
|
|
|
/**
|
|
* Determines if the given thing is a array or js object.
|
|
*
|
|
* @param {string} thing - The object or array to be visited.
|
|
*
|
|
* @returns {boolean}
|
|
*/
|
|
function isVisitable(thing) {
  // Only plain objects and arrays are descended into during serialization.
  return utils$1.isPlainObject(thing) || utils$1.isArray(thing);
}
|
|
|
|
/**
|
|
* It removes the brackets from the end of a string
|
|
*
|
|
* @param {string} key - The key of the parameter.
|
|
*
|
|
* @returns {string} the key without the brackets.
|
|
*/
|
|
function removeBrackets(key) {
  // Strip a trailing '[]' array marker from a form key, if present.
  if (utils$1.endsWith(key, '[]')) {
    return key.slice(0, -2);
  }
  return key;
}
|
|
|
|
/**
|
|
* It takes a path, a key, and a boolean, and returns a string
|
|
*
|
|
* @param {string} path - The path to the current key.
|
|
* @param {string} key - The key of the current object being iterated over.
|
|
* @param {string} dots - If true, the key will be rendered with dots instead of brackets.
|
|
*
|
|
* @returns {string} The path to the current key.
|
|
*/
|
|
function renderKey(path, key, dots) {
  // No path yet: the key stands alone.
  if (!path) return key;
  // Join path + key either with dots ('a.b.c') or bracket notation ('a[b][c]');
  // the first segment is never bracketed.
  const tokens = path.concat(key).map(function each(token, i) {
    // eslint-disable-next-line no-param-reassign
    const clean = removeBrackets(token);
    return dots || i === 0 ? clean : '[' + clean + ']';
  });
  return tokens.join(dots ? '.' : '');
}
|
|
|
|
/**
|
|
* If the array is an array and none of its elements are visitable, then it's a flat array.
|
|
*
|
|
* @param {Array<any>} arr - The array to check
|
|
*
|
|
* @returns {boolean}
|
|
*/
|
|
function isFlatArray(arr) {
  // "Flat" = an array containing no nested plain objects or arrays.
  return utils$1.isArray(arr) && !arr.some(isVisitable);
}

// Every is* predicate from utils$1, collected into one object so custom
// toFormData visitors can reuse them.
const predicates = utils$1.toFlatObject(utils$1, {}, null, function filter(prop) {
  return /^is[A-Z]/.test(prop);
});
|
|
|
|
/**
|
|
* Convert a data object to FormData
|
|
*
|
|
* @param {Object} obj
|
|
* @param {?Object} [formData]
|
|
* @param {?Object} [options]
|
|
* @param {Function} [options.visitor]
|
|
* @param {Boolean} [options.metaTokens = true]
|
|
* @param {Boolean} [options.dots = false]
|
|
* @param {?Boolean} [options.indexes = false]
|
|
*
|
|
* @returns {Object}
|
|
**/
|
|
|
|
/**
|
|
* It converts an object into a FormData object
|
|
*
|
|
* @param {Object<any, any>} obj - The object to convert to form data.
|
|
* @param {string} formData - The FormData object to append to.
|
|
* @param {Object<string, any>} options
|
|
*
|
|
* @returns
|
|
*/
|
|
function toFormData$1(obj, formData, options) {
  if (!utils$1.isObject(obj)) {
    throw new TypeError('target must be an object');
  }

  // eslint-disable-next-line no-param-reassign
  formData = formData || new (FormData)();

  // Merge caller options over the defaults; only defined keys override.
  // eslint-disable-next-line no-param-reassign
  options = utils$1.toFlatObject(options, {
    metaTokens: true,
    dots: false,
    indexes: false
  }, false, function defined(option, source) {
    // eslint-disable-next-line no-eq-null,eqeqeq
    return !utils$1.isUndefined(source[option]);
  });

  const metaTokens = options.metaTokens;
  // eslint-disable-next-line no-use-before-define
  const visitor = options.visitor || defaultVisitor;
  const dots = options.dots;
  const indexes = options.indexes;
  const _Blob = options.Blob || typeof Blob !== 'undefined' && Blob;
  const useBlob = _Blob && utils$1.isSpecCompliantForm(formData);

  if (!utils$1.isFunction(visitor)) {
    throw new TypeError('visitor must be a function');
  }

  // Convert a leaf value into something FormData#append accepts.
  function convertValue(value) {
    if (value === null) return '';

    if (utils$1.isDate(value)) {
      return value.toISOString();
    }

    if (utils$1.isBoolean(value)) {
      return value.toString();
    }

    if (!useBlob && utils$1.isBlob(value)) {
      throw new AxiosError$1('Blob is not supported. Use a Buffer instead.');
    }

    if (utils$1.isArrayBuffer(value) || utils$1.isTypedArray(value)) {
      return useBlob && typeof Blob === 'function' ? new Blob([value]) : Buffer.from(value);
    }

    return value;
  }

  /**
   * Default visitor.
   *
   * @param {*} value
   * @param {String|Number} key
   * @param {Array<String|Number>} path
   * @this {FormData}
   *
   * @returns {boolean} return true to visit the each prop of the value recursively
   */
  function defaultVisitor(value, key, path) {
    let arr = value;

    if (value && !path && typeof value === 'object') {
      if (utils$1.endsWith(key, '{}')) {
        // 'key{}' marker: serialize the whole object as a single JSON string.
        // eslint-disable-next-line no-param-reassign
        key = metaTokens ? key : key.slice(0, -2);
        // eslint-disable-next-line no-param-reassign
        value = JSON.stringify(value);
      } else if (
        (utils$1.isArray(value) && isFlatArray(value)) ||
        ((utils$1.isFileList(value) || utils$1.endsWith(key, '[]')) && (arr = utils$1.toArray(value))
        )) {
        // Flat arrays / FileLists get appended element-by-element.
        // eslint-disable-next-line no-param-reassign
        key = removeBrackets(key);

        arr.forEach(function each(el, index) {
          // indexes: true -> 'key[0]', null -> 'key', false -> 'key[]'
          !(utils$1.isUndefined(el) || el === null) && formData.append(
            // eslint-disable-next-line no-nested-ternary
            indexes === true ? renderKey([key], index, dots) : (indexes === null ? key : key + '[]'),
            convertValue(el)
          );
        });
        return false;
      }
    }

    if (isVisitable(value)) {
      return true;
    }

    formData.append(renderKey(path, key, dots), convertValue(value));

    return false;
  }

  // Ancestor chain of the current traversal, used for cycle detection.
  const stack = [];

  const exposedHelpers = Object.assign(predicates, {
    defaultVisitor,
    convertValue,
    isVisitable
  });

  // Depth-first walk; a visitor returning true means "recurse into this value".
  function build(value, path) {
    if (utils$1.isUndefined(value)) return;

    if (stack.indexOf(value) !== -1) {
      throw Error('Circular reference detected in ' + path.join('.'));
    }

    stack.push(value);

    utils$1.forEach(value, function each(el, key) {
      const result = !(utils$1.isUndefined(el) || el === null) && visitor.call(
        formData, el, utils$1.isString(key) ? key.trim() : key, path, exposedHelpers
      );

      if (result === true) {
        build(el, path ? path.concat(key) : [key]);
      }
    });

    stack.pop();
  }

  if (!utils$1.isObject(obj)) {
    throw new TypeError('data must be an object');
  }

  build(obj);

  return formData;
}
|
|
|
|
/**
|
|
* It encodes a string by replacing all characters that are not in the unreserved set with
|
|
* their percent-encoded equivalents
|
|
*
|
|
* @param {string} str - The string to encode.
|
|
*
|
|
* @returns {string} The encoded string.
|
|
*/
|
|
/**
 * Percent-encode a query-string component, then apply the legacy form
 * conventions: also escape !'()~, turn %20 into '+', and restore %00 to NUL.
 *
 * @param {string} str - The string to encode.
 *
 * @returns {string} The encoded string.
 */
function encode$1(str) {
  const replacements = {
    '!': '%21',
    "'": '%27',
    '(': '%28',
    ')': '%29',
    '~': '%7E',
    '%20': '+',
    '%00': '\x00'
  };
  const encoded = encodeURIComponent(str);
  return encoded.replace(/[!'()~]|%20|%00/g, match => replacements[match]);
}
|
|
|
|
/**
|
|
* It takes a params object and converts it to a FormData object
|
|
*
|
|
* @param {Object<string, any>} params - The parameters to be converted to a FormData object.
|
|
* @param {Object<string, any>} options - The options object passed to the Axios constructor.
|
|
*
|
|
* @returns {void}
|
|
*/
|
|
function AxiosURLSearchParams(params, options) {
  // Accumulated [name, value] pairs in insertion order.
  this._pairs = [];

  // toFormData$1 drives this object through its append() method below.
  params && toFormData$1(params, this, options);
}

const prototype = AxiosURLSearchParams.prototype;

prototype.append = function append(name, value) {
  this._pairs.push([name, value]);
};

prototype.toString = function toString(encoder) {
  // A custom encoder receives (value, defaultEncoder) so it can delegate
  // back to the built-in encode$1.
  const _encode = encoder ? function(value) {
    return encoder.call(this, value, encode$1);
  } : encode$1;

  return this._pairs.map(function each(pair) {
    return _encode(pair[0]) + '=' + _encode(pair[1]);
  }, '').join('&');
};
|
|
|
|
/**
|
|
* It replaces all instances of the characters `:`, `$`, `,`, `+`, `[`, and `]` with their
|
|
* URI encoded counterparts
|
|
*
|
|
* @param {string} val The value to be encoded.
|
|
*
|
|
* @returns {string} The encoded value.
|
|
*/
|
|
/**
 * Percent-encode a URL component, then restore the characters axios keeps
 * readable in query strings (: $ , [ ]) and render spaces as '+'.
 *
 * @param {string} val The value to be encoded.
 *
 * @returns {string} The encoded value.
 */
function encode(val) {
  const restorations = [
    [/%3A/gi, ':'],
    [/%24/g, '$'],
    [/%2C/gi, ','],
    [/%20/g, '+'],
    [/%5B/gi, '['],
    [/%5D/gi, ']']
  ];
  let encoded = encodeURIComponent(val);
  for (const [pattern, replacement] of restorations) {
    encoded = encoded.replace(pattern, replacement);
  }
  return encoded;
}
|
|
|
|
/**
|
|
* Build a URL by appending params to the end
|
|
*
|
|
* @param {string} url The base of the url (e.g., http://www.google.com)
|
|
* @param {object} [params] The params to be appended
|
|
* @param {?(object|Function)} options
|
|
*
|
|
* @returns {string} The formatted url
|
|
*/
|
|
function buildURL(url, params, options) {
  /*eslint no-param-reassign:0*/
  if (!params) {
    return url;
  }

  const _encode = options && options.encode || encode;

  // buildURL(url, params, serializeFn) shorthand: a function third argument
  // is treated as the serializer.
  if (utils$1.isFunction(options)) {
    options = {
      serialize: options
    };
  }

  const serializeFn = options && options.serialize;

  let serializedParams;

  if (serializeFn) {
    serializedParams = serializeFn(params, options);
  } else {
    serializedParams = utils$1.isURLSearchParams(params) ?
      params.toString() :
      new AxiosURLSearchParams(params, options).toString(_encode);
  }

  if (serializedParams) {
    // Drop any hash fragment before appending — query params belong before '#'.
    const hashmarkIndex = url.indexOf("#");

    if (hashmarkIndex !== -1) {
      url = url.slice(0, hashmarkIndex);
    }
    // Start a query string or extend an existing one.
    url += (url.indexOf('?') === -1 ? '?' : '&') + serializedParams;
  }

  return url;
}
|
|
|
|
/**
 * Registry of request/response interceptors attached to an Axios instance.
 */
class InterceptorManager {
  constructor() {
    // Slots are nulled (not spliced) by eject() so ids returned by use()
    // remain stable.
    this.handlers = [];
  }

  /**
   * Add a new interceptor to the stack
   *
   * @param {Function} fulfilled The function to handle `then` for a `Promise`
   * @param {Function} rejected The function to handle `reject` for a `Promise`
   * @param {Object} [options] Optional `synchronous` flag and `runWhen` predicate.
   *
   * @return {Number} An ID used to remove interceptor later
   */
  use(fulfilled, rejected, options) {
    this.handlers.push({
      fulfilled,
      rejected,
      synchronous: options ? options.synchronous : false,
      runWhen: options ? options.runWhen : null
    });
    return this.handlers.length - 1;
  }

  /**
   * Remove an interceptor from the stack
   *
   * @param {Number} id The ID that was returned by `use`
   *
   * @returns {Boolean} `true` if the interceptor was removed, `false` otherwise
   */
  eject(id) {
    if (this.handlers[id]) {
      this.handlers[id] = null;
    }
  }

  /**
   * Clear all interceptors from the stack
   *
   * @returns {void}
   */
  clear() {
    if (this.handlers) {
      this.handlers = [];
    }
  }

  /**
   * Iterate over all the registered interceptors
   *
   * This method is particularly useful for skipping over any
   * interceptors that may have become `null` calling `eject`.
   *
   * @param {Function} fn The function to call for each interceptor
   *
   * @returns {void}
   */
  forEach(fn) {
    utils$1.forEach(this.handlers, function forEachHandler(h) {
      if (h !== null) {
        fn(h);
      }
    });
  }
}
|
|
|
|
// Defaults for the `transitional` config options (legacy-behavior switches).
const transitionalDefaults = {
  silentJSONParsing: true,
  forcedJSONParsing: true,
  clarifyTimeoutError: false
};

// Fall back to the axios polyfill when the platform lacks URLSearchParams.
const URLSearchParams$1 = typeof URLSearchParams !== 'undefined' ? URLSearchParams : AxiosURLSearchParams;

const FormData$1 = typeof FormData !== 'undefined' ? FormData : null;

const Blob$1 = typeof Blob !== 'undefined' ? Blob : null;

// Browser-flavored platform description (this is the browser bundle).
const platform$1 = {
  isBrowser: true,
  classes: {
    URLSearchParams: URLSearchParams$1,
    FormData: FormData$1,
    Blob: Blob$1
  },
  protocols: ['http', 'https', 'file', 'blob', 'url', 'data']
};

const hasBrowserEnv = typeof window !== 'undefined' && typeof document !== 'undefined';

const _navigator = typeof navigator === 'object' && navigator || undefined;

/**
 * Determine if we're running in a standard browser environment
 *
 * This allows axios to run in a web worker, and react-native.
 * Both environments support XMLHttpRequest, but not fully standard globals.
 *
 * web workers:
 *  typeof window -> undefined
 *  typeof document -> undefined
 *
 * react-native:
 *  navigator.product -> 'ReactNative'
 * nativescript
 *  navigator.product -> 'NativeScript' or 'NS'
 *
 * @returns {boolean}
 */
const hasStandardBrowserEnv = hasBrowserEnv &&
  (!_navigator || ['ReactNative', 'NativeScript', 'NS'].indexOf(_navigator.product) < 0);

/**
 * Determine if we're running in a standard browser webWorker environment
 *
 * Although the `isStandardBrowserEnv` method indicates that
 * `allows axios to run in a web worker`, the WebWorker will still be
 * filtered out due to its judgment standard
 * `typeof window !== 'undefined' && typeof document !== 'undefined'`.
 * This leads to a problem when axios post `FormData` in webWorker
 */
const hasStandardBrowserWebWorkerEnv = (() => {
  return (
    typeof WorkerGlobalScope !== 'undefined' &&
    // eslint-disable-next-line no-undef
    self instanceof WorkerGlobalScope &&
    typeof self.importScripts === 'function'
  );
})();

// Current page origin, or a placeholder outside the browser.
const origin = hasBrowserEnv && window.location.href || 'http://localhost';

// Frozen module-shaped namespace of environment flags.
const utils = /*#__PURE__*/Object.freeze(/*#__PURE__*/Object.defineProperty({
  __proto__: null,
  hasBrowserEnv,
  hasStandardBrowserEnv,
  hasStandardBrowserWebWorkerEnv,
  navigator: _navigator,
  origin
}, Symbol.toStringTag, { value: 'Module' }));

// Environment flags merged with the browser platform description.
const platform = {
  ...utils,
  ...platform$1
};
|
|
|
|
// Serialize `data` into URLSearchParams form (application/x-www-form-urlencoded).
function toURLEncodedForm(data, options) {
  return toFormData$1(data, new platform.classes.URLSearchParams(), {
    visitor: function(value, key, path, helpers) {
      // On Node, Buffers are transported as base64 strings.
      if (platform.isNode && utils$1.isBuffer(value)) {
        this.append(key, value.toString('base64'));
        return false;
      }

      return helpers.defaultVisitor.apply(this, arguments);
    },
    ...options
  });
}
|
|
|
|
/**
|
|
* It takes a string like `foo[x][y][z]` and returns an array like `['foo', 'x', 'y', 'z']
|
|
*
|
|
* @param {string} name - The name of the property to get.
|
|
*
|
|
* @returns An array of strings.
|
|
*/
|
|
function parsePropPath(name) {
  // foo[x][y][z]
  // foo.x.y.z
  // foo-x-y-z
  // foo x y z
  return utils$1.matchAll(/\w+|\[(\w*)]/g, name).map(match => {
    // '[]' yields '' (append-to-array marker); otherwise the bracketed
    // capture or the bare token.
    return match[0] === '[]' ? '' : match[1] || match[0];
  });
}
|
|
|
|
/**
|
|
* Convert an array to an object.
|
|
*
|
|
* @param {Array<any>} arr - The array to convert to an object.
|
|
*
|
|
* @returns An object with the same keys and values as the array.
|
|
*/
|
|
/**
 * Convert an array to a plain object keyed by index.
 *
 * @param {Array<any>} arr - The array to convert to an object.
 *
 * @returns An object with the same keys and values as the array.
 */
function arrayToObject(arr) {
  const obj = {};
  // Object.keys skips holes in sparse arrays, matching index-by-index copying.
  for (const key of Object.keys(arr)) {
    obj[key] = arr[key];
  }
  return obj;
}
|
|
|
|
/**
|
|
* It takes a FormData object and returns a JavaScript object
|
|
*
|
|
* @param {string} formData The FormData object to convert to JSON.
|
|
*
|
|
* @returns {Object<string, any> | null} The converted object.
|
|
*/
|
|
function formDataToJSON(formData) {
  // Recursively set `value` at `path` inside `target`. The boolean return
  // signals the parent whether its array container should be converted to a
  // plain object (i.e. a non-numeric key was used at this level).
  function buildPath(path, value, target, index) {
    let name = path[index++];

    // Guard against prototype pollution via crafted field names.
    if (name === '__proto__') return true;

    const isNumericKey = Number.isFinite(+name);
    const isLast = index >= path.length;
    // An empty segment (from 'key[]') appends at the end of an array target.
    name = !name && utils$1.isArray(target) ? target.length : name;

    if (isLast) {
      // Repeated field names collect into an array.
      if (utils$1.hasOwnProp(target, name)) {
        target[name] = [target[name], value];
      } else {
        target[name] = value;
      }

      return !isNumericKey;
    }

    // Intermediate containers start life as arrays.
    if (!target[name] || !utils$1.isObject(target[name])) {
      target[name] = [];
    }

    const result = buildPath(path, value, target[name], index);

    // A non-numeric child key means the array was really an object.
    if (result && utils$1.isArray(target[name])) {
      target[name] = arrayToObject(target[name]);
    }

    return !isNumericKey;
  }

  if (utils$1.isFormData(formData) && utils$1.isFunction(formData.entries)) {
    const obj = {};

    utils$1.forEachEntry(formData, (name, value) => {
      buildPath(parsePropPath(name), value, obj, 0);
    });

    return obj;
  }

  // Not a FormData with an entries() iterator — nothing to convert.
  return null;
}
|
|
|
|
/**
|
|
* It takes a string, tries to parse it, and if it fails, it returns the stringified version
|
|
* of the input
|
|
*
|
|
* @param {any} rawValue - The value to be stringified.
|
|
* @param {Function} parser - A function that parses a string into a JavaScript object.
|
|
* @param {Function} encoder - A function that takes a value and returns a string.
|
|
*
|
|
* @returns {string} A stringified version of the rawValue.
|
|
*/
|
|
function stringifySafely(rawValue, parser, encoder) {
  if (utils$1.isString(rawValue)) {
    try {
      // Strings that already parse as JSON are passed through (trimmed)
      // rather than double-encoded.
      (parser || JSON.parse)(rawValue);
      return utils$1.trim(rawValue);
    } catch (e) {
      // Only a parse failure falls through to stringification below.
      if (e.name !== 'SyntaxError') {
        throw e;
      }
    }
  }

  return (encoder || JSON.stringify)(rawValue);
}
|
|
|
|
// Default request configuration shared by every axios instance.
const defaults = {

  transitional: transitionalDefaults,

  // Adapter preference order; the first supported one is used.
  adapter: ['xhr', 'http', 'fetch'],

  // Serializes the outgoing request body based on its type and Content-Type.
  transformRequest: [function transformRequest(data, headers) {
    const contentType = headers.getContentType() || '';
    const hasJSONContentType = contentType.indexOf('application/json') > -1;
    const isObjectPayload = utils$1.isObject(data);

    // An HTML <form> element is converted to FormData first.
    if (isObjectPayload && utils$1.isHTMLForm(data)) {
      data = new FormData(data);
    }

    const isFormData = utils$1.isFormData(data);

    if (isFormData) {
      return hasJSONContentType ? JSON.stringify(formDataToJSON(data)) : data;
    }

    // Binary / stream payloads pass through untouched.
    if (utils$1.isArrayBuffer(data) ||
      utils$1.isBuffer(data) ||
      utils$1.isStream(data) ||
      utils$1.isFile(data) ||
      utils$1.isBlob(data) ||
      utils$1.isReadableStream(data)
    ) {
      return data;
    }
    if (utils$1.isArrayBufferView(data)) {
      return data.buffer;
    }
    if (utils$1.isURLSearchParams(data)) {
      headers.setContentType('application/x-www-form-urlencoded;charset=utf-8', false);
      return data.toString();
    }

    let isFileList;

    if (isObjectPayload) {
      if (contentType.indexOf('application/x-www-form-urlencoded') > -1) {
        return toURLEncodedForm(data, this.formSerializer).toString();
      }

      if ((isFileList = utils$1.isFileList(data)) || contentType.indexOf('multipart/form-data') > -1) {
        const _FormData = this.env && this.env.FormData;

        return toFormData$1(
          isFileList ? {'files[]': data} : data,
          _FormData && new _FormData(),
          this.formSerializer
        );
      }
    }

    // Fallback: JSON-encode objects or explicitly-JSON payloads.
    if (isObjectPayload || hasJSONContentType ) {
      headers.setContentType('application/json', false);
      return stringifySafely(data);
    }

    return data;
  }],

  // Parses the incoming response body (JSON by default).
  transformResponse: [function transformResponse(data) {
    const transitional = this.transitional || defaults.transitional;
    const forcedJSONParsing = transitional && transitional.forcedJSONParsing;
    const JSONRequested = this.responseType === 'json';

    if (utils$1.isResponse(data) || utils$1.isReadableStream(data)) {
      return data;
    }

    if (data && utils$1.isString(data) && ((forcedJSONParsing && !this.responseType) || JSONRequested)) {
      const silentJSONParsing = transitional && transitional.silentJSONParsing;
      // With silent parsing off and responseType 'json', parse failures throw.
      const strictJSONParsing = !silentJSONParsing && JSONRequested;

      try {
        return JSON.parse(data);
      } catch (e) {
        if (strictJSONParsing) {
          if (e.name === 'SyntaxError') {
            throw AxiosError$1.from(e, AxiosError$1.ERR_BAD_RESPONSE, this, null, this.response);
          }
          throw e;
        }
      }
    }

    return data;
  }],

  /**
   * A timeout in milliseconds to abort a request. If set to 0 (default) a
   * timeout is not created.
   */
  timeout: 0,

  xsrfCookieName: 'XSRF-TOKEN',
  xsrfHeaderName: 'X-XSRF-TOKEN',

  maxContentLength: -1,
  maxBodyLength: -1,

  env: {
    FormData: platform.classes.FormData,
    Blob: platform.classes.Blob
  },

  validateStatus: function validateStatus(status) {
    return status >= 200 && status < 300;
  },

  headers: {
    common: {
      'Accept': 'application/json, text/plain, */*',
      'Content-Type': undefined
    }
  }
};

// Per-method header buckets, merged with `common` at request time.
utils$1.forEach(['delete', 'get', 'head', 'post', 'put', 'patch'], (method) => {
  defaults.headers[method] = {};
});
|
|
|
|
// RawAxiosHeaders whose duplicates are ignored by node
// c.f. https://nodejs.org/api/http.html#http_message_headers
// Used by parseHeaders below: repeats of these names keep the first value.
const ignoreDuplicateOf = utils$1.toObjectSet([
  'age', 'authorization', 'content-length', 'content-type', 'etag',
  'expires', 'from', 'host', 'if-modified-since', 'if-unmodified-since',
  'last-modified', 'location', 'max-forwards', 'proxy-authorization',
  'referer', 'retry-after', 'user-agent'
]);
|
|
|
|
/**
 * Parse a raw header blob (newline-separated `Name: value` lines, e.g. the
 * output of XMLHttpRequest#getAllResponseHeaders) into a plain object keyed
 * by lower-cased header name.
 *
 * - duplicate names listed in `ignoreDuplicateOf` keep only the first value;
 * - `set-cookie` values are collected into an array;
 * - other duplicates are joined with ', '.
 *
 * @param {String} rawHeaders Headers needing to be parsed
 *
 * @returns {Object} Headers parsed into an object
 */
const parseHeaders = rawHeaders => {
  const parsed = {};

  if (!rawHeaders) {
    return parsed;
  }

  for (const line of rawHeaders.split('\n')) {
    const sep = line.indexOf(':');
    const name = line.substring(0, sep).trim().toLowerCase();
    const value = line.substring(sep + 1).trim();

    // Skip nameless lines and duplicates node itself would ignore.
    if (!name || (parsed[name] && ignoreDuplicateOf[name])) {
      continue;
    }

    if (name === 'set-cookie') {
      if (parsed[name]) {
        parsed[name].push(value);
      } else {
        parsed[name] = [value];
      }
    } else {
      parsed[name] = parsed[name] ? parsed[name] + ', ' + value : value;
    }
  }

  return parsed;
};
|
|
|
|
// Private symbol under which AxiosHeaders.accessor stores its bookkeeping
// (which header accessors have been defined on the class).
const $internals = Symbol('internals');
|
|
|
|
/**
 * Normalize a header name: stringify, trim, and lower-case it.
 * Falsy input (e.g. '', undefined) is returned unchanged.
 */
function normalizeHeader(header) {
  if (!header) {
    return header;
  }
  return String(header).trim().toLowerCase();
}
|
|
|
|
/**
 * Normalize a header value: arrays are normalized element-wise, `false` and
 * nullish values pass through untouched (false acts as a tombstone),
 * everything else is stringified.
 */
function normalizeValue(value) {
  if (value === false || value == null) {
    return value;
  }

  if (utils$1.isArray(value)) {
    return value.map(normalizeValue);
  }

  return String(value);
}
|
|
|
|
/**
 * Parse a comma/semicolon separated token string (e.g. a Cache-Control-style
 * header value) into a null-prototype map of token name -> token value.
 * Valueless tokens map to undefined.
 */
function parseTokens(str) {
  const tokens = Object.create(null);
  const tokensRE = /([^\s,;=]+)\s*(?:=\s*([^,;]+))?/g;

  for (const match of String(str).matchAll(tokensRE)) {
    tokens[match[1]] = match[2];
  }

  return tokens;
}
|
|
|
|
// RFC 7230 "token" check: true when the (trimmed) string contains only
// characters legal in a single HTTP header name.
const isValidHeaderName = (str) => {
  const token = str.trim();
  return /^[-_a-zA-Z0-9^`|~,!#$%&'*+.]+$/.test(token);
};
|
|
|
|
/**
 * Test a header (value + name) against a filter that may be a function,
 * a substring, or a RegExp. With `isHeaderNameFilter` set, the header NAME
 * is matched instead of its value. Returns undefined when the candidate is
 * not a string or the filter type is unsupported.
 */
function matchHeaderValue(context, value, header, filter, isHeaderNameFilter) {
  if (utils$1.isFunction(filter)) {
    // Custom predicate decides; caller-provided `this` is preserved.
    return filter.call(this, value, header);
  }

  const candidate = isHeaderNameFilter ? header : value;

  if (!utils$1.isString(candidate)) return;

  if (utils$1.isString(filter)) {
    return candidate.includes(filter);
  }

  if (utils$1.isRegExp(filter)) {
    return filter.test(candidate);
  }
}
|
|
|
|
/**
 * Pretty-print a header name, e.g. 'content-type' -> 'Content-Type':
 * trim, lower-case, then upper-case the first character of each word run.
 */
function formatHeader(header) {
  const lower = header.trim().toLowerCase();
  return lower.replace(/([a-z\d])(\w*)/g, (w, char, str) => char.toUpperCase() + str);
}
|
|
|
|
/**
 * Define get/set/has convenience accessors for one header name on the given
 * object (e.g. getContentType/setContentType/hasContentType for
 * 'Content-Type'). The leading space fed to toCamelCase presumably makes it
 * capitalize the first word — TODO confirm against utils.toCamelCase.
 */
function buildAccessors(obj, header) {
  const accessorName = utils$1.toCamelCase(' ' + header);

  for (const methodName of ['get', 'set', 'has']) {
    Object.defineProperty(obj, methodName + accessorName, {
      // Delegate to the generic method with the header name pre-bound.
      value(arg1, arg2, arg3) {
        return this[methodName].call(this, header, arg1, arg2, arg3);
      },
      configurable: true
    });
  }
}
|
|
|
|
// Case-insensitive HTTP header container. Header names are stored as own
// enumerable properties; values are strings, arrays of strings, or `false`
// (a tombstone meaning "do not send this header").
let AxiosHeaders$1 = class AxiosHeaders {
  constructor(headers) {
    headers && this.set(headers);
  }

  // Set one header (name + value) or many (plain object, AxiosHeaders
  // instance, raw header string, or iterable of [name, value] entries).
  // Rewrite semantics (per header): true always overwrites, false never
  // overwrites an existing value, undefined overwrites unless the stored
  // value is the `false` tombstone.
  set(header, valueOrRewrite, rewrite) {
    const self = this;

    function setHeader(_value, _header, _rewrite) {
      const lHeader = normalizeHeader(_header);

      if (!lHeader) {
        throw new Error('header name must be a non-empty string');
      }

      // findKey performs a case-insensitive lookup of an existing own key.
      const key = utils$1.findKey(self, lHeader);

      if(!key || self[key] === undefined || _rewrite === true || (_rewrite === undefined && self[key] !== false)) {
        self[key || _header] = normalizeValue(_value);
      }
    }

    const setHeaders = (headers, _rewrite) =>
      utils$1.forEach(headers, (_value, _header) => setHeader(_value, _header, _rewrite));

    if (utils$1.isPlainObject(header) || header instanceof this.constructor) {
      setHeaders(header, valueOrRewrite);
    } else if(utils$1.isString(header) && (header = header.trim()) && !isValidHeaderName(header)) {
      // A string that is NOT a valid single header name is treated as a raw
      // header blob and parsed line by line.
      setHeaders(parseHeaders(header), valueOrRewrite);
    } else if (utils$1.isObject(header) && utils$1.isIterable(header)) {
      // Iterable of [name, value] entries (e.g. a Map); duplicate names are
      // collected into arrays before being applied.
      let obj = {}, dest, key;
      for (const entry of header) {
        if (!utils$1.isArray(entry)) {
          throw TypeError('Object iterator must return a key-value pair');
        }

        obj[key = entry[0]] = (dest = obj[key]) ?
          (utils$1.isArray(dest) ? [...dest, entry[1]] : [dest, entry[1]]) : entry[1];
      }

      setHeaders(obj, valueOrRewrite);
    } else {
      // Single name/value pair.
      header != null && setHeader(valueOrRewrite, header, rewrite);
    }

    return this;
  }

  // Read a header (case-insensitive). `parser` may be:
  //   true   -> parse the value into a token map (parseTokens);
  //   fn     -> custom parser called with (value, key);
  //   RegExp -> exec'd against the value.
  // Returns undefined when the header name is falsy or absent.
  get(header, parser) {
    header = normalizeHeader(header);

    if (header) {
      const key = utils$1.findKey(this, header);

      if (key) {
        const value = this[key];

        if (!parser) {
          return value;
        }

        if (parser === true) {
          return parseTokens(value);
        }

        if (utils$1.isFunction(parser)) {
          return parser.call(this, value, key);
        }

        if (utils$1.isRegExp(parser)) {
          return parser.exec(value);
        }

        throw new TypeError('parser must be boolean|regexp|function');
      }
    }
  }

  // True when the header exists (and, if `matcher` is given, its value
  // satisfies the string/RegExp/function filter via matchHeaderValue).
  has(header, matcher) {
    header = normalizeHeader(header);

    if (header) {
      const key = utils$1.findKey(this, header);

      return !!(key && this[key] !== undefined && (!matcher || matchHeaderValue(this, this[key], key, matcher)));
    }

    return false;
  }

  // Delete one header or an array of headers; `matcher` optionally restricts
  // deletion to matching values. Returns true when anything was removed.
  delete(header, matcher) {
    const self = this;
    let deleted = false;

    function deleteHeader(_header) {
      _header = normalizeHeader(_header);

      if (_header) {
        const key = utils$1.findKey(self, _header);

        if (key && (!matcher || matchHeaderValue(self, self[key], key, matcher))) {
          delete self[key];

          deleted = true;
        }
      }
    }

    if (utils$1.isArray(header)) {
      header.forEach(deleteHeader);
    } else {
      deleteHeader(header);
    }

    return deleted;
  }

  // Remove all headers, or (with `matcher`) only those whose NAME matches.
  // Returns true when anything was removed.
  clear(matcher) {
    const keys = Object.keys(this);
    let i = keys.length;
    let deleted = false;

    while (i--) {
      const key = keys[i];
      if(!matcher || matchHeaderValue(this, this[key], key, matcher, true)) {
        delete this[key];
        deleted = true;
      }
    }

    return deleted;
  }

  // Deduplicate header names case-insensitively and re-normalize values
  // in place. With `format` set, names are also pretty-printed via
  // formatHeader ('content-type' -> 'Content-Type').
  normalize(format) {
    const self = this;
    const headers = {};

    utils$1.forEach(this, (value, header) => {
      const key = utils$1.findKey(headers, header);

      if (key) {
        // A case-variant of this name was already seen: keep the first
        // spelling, overwrite with the later value.
        self[key] = normalizeValue(value);
        delete self[header];
        return;
      }

      const normalized = format ? formatHeader(header) : String(header).trim();

      if (normalized !== header) {
        delete self[header];
      }

      self[normalized] = normalizeValue(value);

      headers[normalized] = true;
    });

    return this;
  }

  // New instance combining this one with further header sources.
  concat(...targets) {
    return this.constructor.concat(this, ...targets);
  }

  // Plain (null-prototype) object snapshot. Nullish values and `false`
  // tombstones are omitted; with `asStrings`, array values join with ', '.
  toJSON(asStrings) {
    const obj = Object.create(null);

    utils$1.forEach(this, (value, header) => {
      value != null && value !== false && (obj[header] = asStrings && utils$1.isArray(value) ? value.join(', ') : value);
    });

    return obj;
  }

  // Iterates [name, value] entries of the JSON snapshot.
  [Symbol.iterator]() {
    return Object.entries(this.toJSON())[Symbol.iterator]();
  }

  // Raw-header-style rendering: one 'name: value' per line.
  toString() {
    return Object.entries(this.toJSON()).map(([header, value]) => header + ': ' + value).join('\n');
  }

  // Set-Cookie is stored as an array (see parseHeaders); default to [].
  getSetCookie() {
    return this.get("set-cookie") || [];
  }

  get [Symbol.toStringTag]() {
    return 'AxiosHeaders';
  }

  // Coerce anything header-like into an AxiosHeaders instance.
  static from(thing) {
    return thing instanceof this ? thing : new this(thing);
  }

  // Merge several header sources into a new instance (later sources win
  // according to set()'s rewrite rules).
  static concat(first, ...targets) {
    const computed = new this(first);

    targets.forEach((target) => computed.set(target));

    return computed;
  }

  // Define getXxx/setXxx/hasXxx convenience accessors on the prototype for
  // the given header name(s).
  // NOTE(review): the internals object is recreated on every call (inner
  // assignment is unconditional), so the `accessors` cache only dedupes
  // within a single invocation — confirm whether cross-call memoization
  // was intended.
  static accessor(header) {
    const internals = this[$internals] = (this[$internals] = {
      accessors: {}
    });

    const accessors = internals.accessors;
    const prototype = this.prototype;

    function defineAccessor(_header) {
      const lHeader = normalizeHeader(_header);

      if (!accessors[lHeader]) {
        buildAccessors(prototype, _header);
        accessors[lHeader] = true;
      }
    }

    utils$1.isArray(header) ? header.forEach(defineAccessor) : defineAccessor(header);

    return this;
  }
};
|
|
|
|
// Pre-define get/set/has accessors (getContentType, setAccept, ...) for the
// most common headers.
AxiosHeaders$1.accessor(['Content-Type', 'Content-Length', 'Accept', 'Accept-Encoding', 'User-Agent', 'Authorization']);

// reserved names hotfix
// Header data written through a property that collides with a prototype
// method name (e.g. `set`) would shadow the method; redirect such writes to
// a capitalized alias so the methods stay callable.
utils$1.reduceDescriptors(AxiosHeaders$1.prototype, ({value}, key) => {
  let mapped = key[0].toUpperCase() + key.slice(1); // map `set` => `Set`
  return {
    get: () => value,
    set(headerValue) {
      this[mapped] = headerValue;
    }
  }
});

// Lock down the prototype methods.
utils$1.freezeMethods(AxiosHeaders$1);
|
|
|
|
/**
 * Transform the data for a request or a response by running it through a
 * chain of transform functions. Each transformer is called with
 * (data, normalizedHeaders, status) and its return value feeds the next one.
 *
 * @param {Array|Function} fns A single function or Array of functions
 * @param {?Object} response The response object
 *
 * @returns {*} The resulting transformed data
 */
function transformData(fns, response) {
  const config = this || defaults;
  const context = response || config;
  const headers = AxiosHeaders$1.from(context.headers);
  let data = context.data;
  const status = response ? response.status : undefined;

  utils$1.forEach(fns, function transform(fn) {
    // Headers are re-normalized before every transformer call so each one
    // sees a deduplicated view.
    data = fn.call(config, data, headers.normalize(), status);
  });

  headers.normalize();

  return data;
}
|
|
|
|
/**
 * Whether the given value is an axios cancellation (an object flagged with
 * the truthy __CANCEL__ marker, i.e. a CanceledError).
 */
function isCancel$1(value) {
  return Boolean(value && value.__CANCEL__);
}
|
|
|
|
/**
 * A `CanceledError` is an object that is thrown when an operation is canceled.
 *
 * @param {string=} message The message.
 * @param {Object=} config The config.
 * @param {Object=} request The request.
 *
 * @returns {CanceledError} The created error.
 */
function CanceledError$1(message, config, request) {
  // eslint-disable-next-line no-eq-null,eqeqeq
  // Default the message to 'canceled'; code is fixed to ERR_CANCELED.
  AxiosError$1.call(this, message == null ? 'canceled' : message, AxiosError$1.ERR_CANCELED, config, request);
  this.name = 'CanceledError';
}

// Mark instances with __CANCEL__ so isCancel$1 can recognize them.
utils$1.inherits(CanceledError$1, AxiosError$1, {
  __CANCEL__: true
});
|
|
|
|
/**
 * Resolve or reject a Promise based on response status.
 * Resolves when the status is missing, no validator is configured, or the
 * validator accepts the status; otherwise rejects with an AxiosError whose
 * code is ERR_BAD_REQUEST for 4xx and ERR_BAD_RESPONSE for 5xx.
 *
 * @param {Function} resolve A function that resolves the promise.
 * @param {Function} reject A function that rejects the promise.
 * @param {object} response The response.
 */
function settle(resolve, reject, response) {
  const validateStatus = response.config.validateStatus;

  if (response.status && validateStatus && !validateStatus(response.status)) {
    // Pick the error code from the status class: index 0 for 4xx, 1 for 5xx.
    const codes = [AxiosError$1.ERR_BAD_REQUEST, AxiosError$1.ERR_BAD_RESPONSE];
    reject(new AxiosError$1(
      'Request failed with status code ' + response.status,
      codes[Math.floor(response.status / 100) - 4],
      response.config,
      response.request,
      response
    ));
    return;
  }

  resolve(response);
}
|
|
|
|
/**
 * Extract the protocol/scheme (e.g. 'https', 'blob') from a URL string,
 * or '' when the URL has no recognizable scheme.
 */
function parseProtocol(url) {
  const match = /^([-+\w]{1,25})(:?\/\/|:)/.exec(url);
  if (!match) {
    return '';
  }
  return match[1] || '';
}
|
|
|
|
/**
 * Calculate data maxRate
 *
 * Returns a `push(chunkLength)` function that records byte counts into a
 * fixed-size ring buffer (`samplesCount` slots) together with their
 * timestamps, and reports the rolling transfer rate in bytes/second.
 * Until `min` milliseconds have elapsed since the first sample, it returns
 * undefined (not enough data for a stable estimate).
 *
 * @param {Number} [samplesCount= 10]
 * @param {Number} [min= 1000]
 * @returns {Function}
 */
function speedometer(samplesCount, min) {
  samplesCount = samplesCount || 10;
  // Ring buffers: `head` is the next write slot, `tail` the oldest sample.
  const bytes = new Array(samplesCount);
  const timestamps = new Array(samplesCount);
  let head = 0;
  let tail = 0;
  let firstSampleTS;

  min = min !== undefined ? min : 1000;

  return function push(chunkLength) {
    const now = Date.now();

    // Timestamp of the oldest retained sample (undefined until buffer wraps
    // or fills).
    const startedAt = timestamps[tail];

    if (!firstSampleTS) {
      firstSampleTS = now;
    }

    bytes[head] = chunkLength;
    timestamps[head] = now;

    // Sum bytes of all samples between tail and head (excluding the slot
    // just written).
    let i = tail;
    let bytesCount = 0;

    while (i !== head) {
      bytesCount += bytes[i++];
      i = i % samplesCount;
    }

    head = (head + 1) % samplesCount;

    // Buffer full: drop the oldest sample.
    if (head === tail) {
      tail = (tail + 1) % samplesCount;
    }

    // Warm-up period: no estimate yet.
    if (now - firstSampleTS < min) {
      return;
    }

    const passed = startedAt && now - startedAt;

    // bytes per second over the window, rounded; undefined when the window
    // has no usable start timestamp.
    return passed ? Math.round(bytesCount * 1000 / passed) : undefined;
  };
}
|
|
|
|
/**
 * Throttle decorator
 *
 * Limits `fn` to at most `freq` calls per second. Calls arriving too early
 * stash their arguments and schedule a trailing invocation, so the final
 * call's arguments are never silently dropped.
 *
 * @param {Function} fn
 * @param {Number} freq
 * @return {Function} `[throttled, flush]` — flush fires any pending trailing
 *   call immediately.
 */
function throttle(fn, freq) {
  let timestamp = 0;
  let threshold = 1000 / freq;
  let lastArgs;
  let timer;

  // Invoke fn now, clearing any scheduled trailing call.
  const invoke = (args, now = Date.now()) => {
    timestamp = now;
    lastArgs = null;
    if (timer) {
      clearTimeout(timer);
      timer = null;
    }
    fn(...args);
  };

  const throttled = (...args) => {
    const now = Date.now();
    const passed = now - timestamp;
    if ( passed >= threshold) {
      invoke(args, now);
    } else {
      // Too soon: remember the latest args and make sure a trailing
      // invocation is scheduled for the remainder of the window.
      lastArgs = args;
      if (!timer) {
        timer = setTimeout(() => {
          timer = null;
          invoke(lastArgs);
        }, threshold - passed);
      }
    }
  };

  // Fire any pending trailing call immediately (used to flush final
  // progress events).
  const flush = () => lastArgs && invoke(lastArgs);

  return [throttled, flush];
}
|
|
|
|
/**
 * Wrap a user progress listener so that raw (upload/download) progress
 * events are converted into axios progress objects (loaded/total/rate/
 * estimated/...) and delivered at most `freq` times per second.
 * Returns the [throttled, flush] pair produced by throttle().
 */
const progressEventReducer = (listener, isDownloadStream, freq = 3) => {
  let lastNotifiedBytes = 0;
  const _speedometer = speedometer(50, 250);

  return throttle(e => {
    const loaded = e.loaded;
    const total = e.lengthComputable ? e.total : undefined;
    const deltaBytes = loaded - lastNotifiedBytes;
    const rate = _speedometer(deltaBytes);
    const withinTotal = loaded <= total;

    lastNotifiedBytes = loaded;

    listener({
      loaded,
      total,
      progress: total ? (loaded / total) : undefined,
      bytes: deltaBytes,
      rate: rate ? rate : undefined,
      // Remaining-time estimate only when we have a rate and a sane total.
      estimated: rate && total && withinTotal ? (total - loaded) / rate : undefined,
      event: e,
      lengthComputable: total != null,
      [isDownloadStream ? 'download' : 'upload']: true
    });
  }, freq);
};
|
|
|
|
/**
 * Adapt a throttled [onEvent, flush] pair so it can be fed a bare
 * loaded-bytes count; the known `total` (may be undefined) is attached to
 * every synthesized progress event.
 */
const progressEventDecorator = (total, throttled) => {
  const lengthComputable = total != null;
  const [onEvent, flush] = throttled;

  return [
    (loaded) => onEvent({ lengthComputable, total, loaded }),
    flush
  ];
};
|
|
|
|
// Defer a callback to run asynchronously via utils.asap, forwarding its
// arguments unchanged.
const asyncDecorator = (fn) => (...args) => {
  return utils$1.asap(() => fn(...args));
};
|
|
|
|
// Same-origin test used to decide whether to attach the XSRF header.
// In standard browser environments: compares protocol/host(/port) of the
// target URL (resolved against platform.origin) with the current origin;
// the port check is skipped on MSIE/Trident user agents.
// Outside a standard browser environment it always reports true.
const isURLSameOrigin = platform.hasStandardBrowserEnv ? ((origin, isMSIE) => (url) => {
  url = new URL(url, platform.origin);

  return (
    origin.protocol === url.protocol &&
    origin.host === url.host &&
    (isMSIE || origin.port === url.port)
  );
})(
  new URL(platform.origin),
  platform.navigator && /(msie|trident)/i.test(platform.navigator.userAgent)
) : () => true;
|
|
|
|
// Cookie helper used for XSRF token reading. In standard browser envs it
// goes through document.cookie; elsewhere (web workers, react-native) it is
// a no-op stub.
const cookies = platform.hasStandardBrowserEnv ?

  // Standard browser envs support document.cookie
  {
    write(name, value, expires, path, domain, secure) {
      const parts = [name + '=' + encodeURIComponent(value)];

      if (utils$1.isNumber(expires)) {
        parts.push('expires=' + new Date(expires).toGMTString());
      }

      if (utils$1.isString(path)) {
        parts.push('path=' + path);
      }

      if (utils$1.isString(domain)) {
        parts.push('domain=' + domain);
      }

      if (secure === true) {
        parts.push('secure');
      }

      document.cookie = parts.join('; ');
    },

    read(name) {
      const match = document.cookie.match(new RegExp('(^|;\\s*)(' + name + ')=([^;]*)'));
      return (match ? decodeURIComponent(match[3]) : null);
    },

    remove(name) {
      // Expire the cookie by rewriting it with a timestamp one day in the past.
      this.write(name, '', Date.now() - 86400000);
    }
  }

  :

  // Non-standard browser env (web workers, react-native) lack needed support.
  {
    write() {},
    read() {
      return null;
    },
    remove() {}
  };
|
|
|
|
/**
 * Determines whether the specified URL is absolute.
 *
 * A URL is considered absolute if it begins with "<scheme>://" or "//"
 * (protocol-relative URL). RFC 3986 defines a scheme as a letter followed by
 * any combination of letters, digits, plus, period, or hyphen.
 *
 * @param {string} url The URL to test
 *
 * @returns {boolean} True if the specified URL is absolute, otherwise false
 */
function isAbsoluteURL(url) {
  const ABSOLUTE_URL_RE = /^([a-z][a-z\d+\-.]*:)?\/\//i;
  return ABSOLUTE_URL_RE.test(url);
}
|
|
|
|
/**
 * Creates a new URL by combining the specified URLs: trailing slashes are
 * stripped from the base and leading slashes from the relative part so the
 * result contains exactly one separator. An empty relative URL returns the
 * base unchanged.
 *
 * @param {string} baseURL The base URL
 * @param {string} relativeURL The relative URL
 *
 * @returns {string} The combined URL
 */
function combineURLs(baseURL, relativeURL) {
  if (!relativeURL) {
    return baseURL;
  }
  return baseURL.replace(/\/?\/$/, '') + '/' + relativeURL.replace(/^\/+/, '');
}
|
|
|
|
/**
 * Creates a new URL by combining the baseURL with the requestedURL,
 * only when the requestedURL is not already an absolute URL.
 * If the requestURL is absolute, this function returns the requestedURL
 * untouched — unless absolute URLs are explicitly disallowed.
 *
 * @param {string} baseURL The base URL
 * @param {string} requestedURL Absolute or relative URL to combine
 *
 * @returns {string} The combined full path
 */
function buildFullPath(baseURL, requestedURL, allowAbsoluteUrls) {
  const isRelativeUrl = !isAbsoluteURL(requestedURL);
  // NOTE(review): loose `== false` also matches 0 — presumably only an
  // explicit `false` is ever passed; confirm before tightening to `===`.
  if (baseURL && (isRelativeUrl || allowAbsoluteUrls == false)) {
    return combineURLs(baseURL, requestedURL);
  }
  return requestedURL;
}
|
|
|
|
// Spread AxiosHeaders instances into plain objects so the config merge
// treats them as data rather than class instances; pass others through.
const headersToObject = (thing) => {
  if (thing instanceof AxiosHeaders$1) {
    return { ...thing };
  }
  return thing;
};
|
|
|
|
/**
 * Config-specific merge-function which creates a new config-object
 * by merging two configuration objects together.
 *
 * Each known config key has a merge strategy (see mergeMap); unknown keys
 * fall back to a deep merge that prefers config2.
 *
 * @param {Object} config1
 * @param {Object} config2
 *
 * @returns {Object} New object resulting from merging config2 to config1
 */
function mergeConfig$1(config1, config2) {
  // eslint-disable-next-line no-param-reassign
  config2 = config2 || {};
  const config = {};

  // Deep-merge two plain objects (optionally case-insensitively), copy a
  // lone plain object or array, and pass any other value through untouched.
  function getMergedValue(target, source, prop, caseless) {
    if (utils$1.isPlainObject(target) && utils$1.isPlainObject(source)) {
      return utils$1.merge.call({caseless}, target, source);
    } else if (utils$1.isPlainObject(source)) {
      return utils$1.merge({}, source);
    } else if (utils$1.isArray(source)) {
      return source.slice();
    }
    return source;
  }

  // Default strategy: prefer config2's value, falling back to config1's,
  // deep-merging when both are plain objects.
  // eslint-disable-next-line consistent-return
  function mergeDeepProperties(a, b, prop , caseless) {
    if (!utils$1.isUndefined(b)) {
      return getMergedValue(a, b, prop , caseless);
    } else if (!utils$1.isUndefined(a)) {
      return getMergedValue(undefined, a, prop , caseless);
    }
  }

  // Request-identity keys (url/method/data): only config2 counts.
  // eslint-disable-next-line consistent-return
  function valueFromConfig2(a, b) {
    if (!utils$1.isUndefined(b)) {
      return getMergedValue(undefined, b);
    }
  }

  // Prefer config2's value, keep config1's as a default; no cross-merge.
  // eslint-disable-next-line consistent-return
  function defaultToConfig2(a, b) {
    if (!utils$1.isUndefined(b)) {
      return getMergedValue(undefined, b);
    } else if (!utils$1.isUndefined(a)) {
      return getMergedValue(undefined, a);
    }
  }

  // Honor keys that are explicitly present (even with undefined values) by
  // checking `in` rather than the value itself.
  // eslint-disable-next-line consistent-return
  function mergeDirectKeys(a, b, prop) {
    if (prop in config2) {
      return getMergedValue(a, b);
    } else if (prop in config1) {
      return getMergedValue(undefined, a);
    }
  }

  // Per-key merge strategies; keys absent here use mergeDeepProperties.
  const mergeMap = {
    url: valueFromConfig2,
    method: valueFromConfig2,
    data: valueFromConfig2,
    baseURL: defaultToConfig2,
    transformRequest: defaultToConfig2,
    transformResponse: defaultToConfig2,
    paramsSerializer: defaultToConfig2,
    timeout: defaultToConfig2,
    timeoutMessage: defaultToConfig2,
    withCredentials: defaultToConfig2,
    withXSRFToken: defaultToConfig2,
    adapter: defaultToConfig2,
    responseType: defaultToConfig2,
    xsrfCookieName: defaultToConfig2,
    xsrfHeaderName: defaultToConfig2,
    onUploadProgress: defaultToConfig2,
    onDownloadProgress: defaultToConfig2,
    decompress: defaultToConfig2,
    maxContentLength: defaultToConfig2,
    maxBodyLength: defaultToConfig2,
    beforeRedirect: defaultToConfig2,
    transport: defaultToConfig2,
    httpAgent: defaultToConfig2,
    httpsAgent: defaultToConfig2,
    cancelToken: defaultToConfig2,
    socketPath: defaultToConfig2,
    responseEncoding: defaultToConfig2,
    validateStatus: mergeDirectKeys,
    // Headers are deep-merged case-insensitively after unwrapping
    // AxiosHeaders instances into plain objects.
    headers: (a, b , prop) => mergeDeepProperties(headersToObject(a), headersToObject(b),prop, true)
  };

  utils$1.forEach(Object.keys({...config1, ...config2}), function computeConfigValue(prop) {
    const merge = mergeMap[prop] || mergeDeepProperties;
    const configValue = merge(config1[prop], config2[prop], prop);
    // Drop undefined results, except for mergeDirectKeys where an explicit
    // undefined is meaningful and must be kept.
    (utils$1.isUndefined(configValue) && merge !== mergeDirectKeys) || (config[prop] = configValue);
  });

  return config;
}
|
|
|
|
// Prepare a request config for a browser adapter (xhr/fetch): builds the
// final URL, wraps headers in AxiosHeaders, applies basic auth, fixes
// FormData content types, and attaches the XSRF header where applicable.
const resolveConfig = (config) => {
  const newConfig = mergeConfig$1({}, config);

  let {data, withXSRFToken, xsrfHeaderName, xsrfCookieName, headers, auth} = newConfig;

  newConfig.headers = headers = AxiosHeaders$1.from(headers);

  // Full URL = baseURL + url (subject to allowAbsoluteUrls) + serialized params.
  newConfig.url = buildURL(buildFullPath(newConfig.baseURL, newConfig.url, newConfig.allowAbsoluteUrls), config.params, config.paramsSerializer);

  // HTTP basic authentication
  // Password is UTF-8 encoded (unescape/encodeURIComponent) before btoa.
  if (auth) {
    headers.set('Authorization', 'Basic ' +
      btoa((auth.username || '') + ':' + (auth.password ? unescape(encodeURIComponent(auth.password)) : ''))
    );
  }

  let contentType;

  if (utils$1.isFormData(data)) {
    if (platform.hasStandardBrowserEnv || platform.hasStandardBrowserWebWorkerEnv) {
      headers.setContentType(undefined); // Let the browser set it
    } else if ((contentType = headers.getContentType()) !== false) {
      // fix semicolon duplication issue for ReactNative FormData implementation
      const [type, ...tokens] = contentType ? contentType.split(';').map(token => token.trim()).filter(Boolean) : [];
      headers.setContentType([type || 'multipart/form-data', ...tokens].join('; '));
    }
  }

  // Add xsrf header
  // This is only done if running in a standard browser environment.
  // Specifically not if we're in a web worker, or react-native.

  if (platform.hasStandardBrowserEnv) {
    // withXSRFToken may be a function deciding per-request.
    withXSRFToken && utils$1.isFunction(withXSRFToken) && (withXSRFToken = withXSRFToken(newConfig));

    // Attach when explicitly enabled, or by default for same-origin requests.
    if (withXSRFToken || (withXSRFToken !== false && isURLSameOrigin(newConfig.url))) {
      // Add xsrf header
      const xsrfValue = xsrfHeaderName && xsrfCookieName && cookies.read(xsrfCookieName);

      if (xsrfValue) {
        headers.set(xsrfHeaderName, xsrfValue);
      }
    }
  }

  return newConfig;
};
|
|
|
|
// The XHR adapter is only usable where the XMLHttpRequest global exists
// (i.e. browser-like environments).
const isXHRAdapterSupported = typeof XMLHttpRequest !== 'undefined';
|
|
|
|
// XMLHttpRequest-based request adapter. Resolves/rejects per settle() and
// the configured validateStatus; supports upload/download progress,
// timeouts, cancel tokens and AbortSignals. `request` is nulled after
// completion/abort so late events become no-ops.
const xhrAdapter = isXHRAdapterSupported && function (config) {
  return new Promise(function dispatchXhrRequest(resolve, reject) {
    const _config = resolveConfig(config);
    let requestData = _config.data;
    const requestHeaders = AxiosHeaders$1.from(_config.headers).normalize();
    let {responseType, onUploadProgress, onDownloadProgress} = _config;
    let onCanceled;
    let uploadThrottled, downloadThrottled;
    let flushUpload, flushDownload;

    // Flush pending (throttled) progress events and detach cancel handlers.
    function done() {
      flushUpload && flushUpload(); // flush events
      flushDownload && flushDownload(); // flush events

      _config.cancelToken && _config.cancelToken.unsubscribe(onCanceled);

      _config.signal && _config.signal.removeEventListener('abort', onCanceled);
    }

    let request = new XMLHttpRequest();

    request.open(_config.method.toUpperCase(), _config.url, true);

    // Set the request timeout in MS
    request.timeout = _config.timeout;

    // Build the axios response object from the finished XHR and settle the
    // promise according to validateStatus.
    function onloadend() {
      if (!request) {
        return;
      }
      // Prepare the response
      const responseHeaders = AxiosHeaders$1.from(
        'getAllResponseHeaders' in request && request.getAllResponseHeaders()
      );
      // 'json' responseType is handled by axios itself (transformResponse),
      // so the raw text is used for it as well.
      const responseData = !responseType || responseType === 'text' || responseType === 'json' ?
        request.responseText : request.response;
      const response = {
        data: responseData,
        status: request.status,
        statusText: request.statusText,
        headers: responseHeaders,
        config,
        request
      };

      settle(function _resolve(value) {
        resolve(value);
        done();
      }, function _reject(err) {
        reject(err);
        done();
      }, response);

      // Clean up request
      request = null;
    }

    if ('onloadend' in request) {
      // Use onloadend if available
      request.onloadend = onloadend;
    } else {
      // Listen for ready state to emulate onloadend
      request.onreadystatechange = function handleLoad() {
        if (!request || request.readyState !== 4) {
          return;
        }

        // The request errored out and we didn't get a response, this will be
        // handled by onerror instead
        // With one exception: request that using file: protocol, most browsers
        // will return status as 0 even though it's a successful request
        if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) {
          return;
        }
        // readystate handler is calling before onerror or ontimeout handlers,
        // so we should call onloadend on the next 'tick'
        setTimeout(onloadend);
      };
    }

    // Handle browser request cancellation (as opposed to a manual cancellation)
    request.onabort = function handleAbort() {
      if (!request) {
        return;
      }

      reject(new AxiosError$1('Request aborted', AxiosError$1.ECONNABORTED, config, request));

      // Clean up request
      request = null;
    };

    // Handle low level network errors
    request.onerror = function handleError() {
      // Real errors are hidden from us by the browser
      // onerror should only fire if it's a network error
      reject(new AxiosError$1('Network Error', AxiosError$1.ERR_NETWORK, config, request));

      // Clean up request
      request = null;
    };

    // Handle timeout
    request.ontimeout = function handleTimeout() {
      let timeoutErrorMessage = _config.timeout ? 'timeout of ' + _config.timeout + 'ms exceeded' : 'timeout exceeded';
      const transitional = _config.transitional || transitionalDefaults;
      if (_config.timeoutErrorMessage) {
        timeoutErrorMessage = _config.timeoutErrorMessage;
      }
      reject(new AxiosError$1(
        timeoutErrorMessage,
        transitional.clarifyTimeoutError ? AxiosError$1.ETIMEDOUT : AxiosError$1.ECONNABORTED,
        config,
        request));

      // Clean up request
      request = null;
    };

    // Remove Content-Type if data is undefined
    requestData === undefined && requestHeaders.setContentType(null);

    // Add headers to the request
    if ('setRequestHeader' in request) {
      utils$1.forEach(requestHeaders.toJSON(), function setRequestHeader(val, key) {
        request.setRequestHeader(key, val);
      });
    }

    // Add withCredentials to request if needed
    if (!utils$1.isUndefined(_config.withCredentials)) {
      request.withCredentials = !!_config.withCredentials;
    }

    // Add responseType to request if needed
    if (responseType && responseType !== 'json') {
      request.responseType = _config.responseType;
    }

    // Handle progress if needed
    if (onDownloadProgress) {
      ([downloadThrottled, flushDownload] = progressEventReducer(onDownloadProgress, true));
      request.addEventListener('progress', downloadThrottled);
    }

    // Not all browsers support upload events
    if (onUploadProgress && request.upload) {
      ([uploadThrottled, flushUpload] = progressEventReducer(onUploadProgress));

      request.upload.addEventListener('progress', uploadThrottled);

      request.upload.addEventListener('loadend', flushUpload);
    }

    if (_config.cancelToken || _config.signal) {
      // Handle cancellation
      // eslint-disable-next-line func-names
      onCanceled = cancel => {
        if (!request) {
          return;
        }
        // Abort-event objects (with a .type) become CanceledError; explicit
        // cancel reasons are propagated as-is.
        reject(!cancel || cancel.type ? new CanceledError$1(null, config, request) : cancel);
        request.abort();
        request = null;
      };

      _config.cancelToken && _config.cancelToken.subscribe(onCanceled);
      if (_config.signal) {
        _config.signal.aborted ? onCanceled() : _config.signal.addEventListener('abort', onCanceled);
      }
    }

    const protocol = parseProtocol(_config.url);

    if (protocol && platform.protocols.indexOf(protocol) === -1) {
      reject(new AxiosError$1('Unsupported protocol ' + protocol + ':', AxiosError$1.ERR_BAD_REQUEST, config));
      return;
    }

    // Send the request
    request.send(requestData || null);
  });
};
|
|
|
|
// Combine multiple abort signals/cancel tokens and an optional timeout into
// one AbortSignal. The first abort (or the timeout) wins; all sources are
// unsubscribed afterwards. Returns undefined when there is nothing to watch.
// The returned signal carries an `unsubscribe()` method for manual cleanup.
const composeSignals = (signals, timeout) => {
  const {length} = (signals = signals ? signals.filter(Boolean) : []);

  if (timeout || length) {
    let controller = new AbortController();

    let aborted;

    const onabort = function (reason) {
      if (!aborted) {
        aborted = true;
        unsubscribe();
        // When called as an abort-event listener, `this` is the source
        // signal and its .reason carries the cause.
        const err = reason instanceof Error ? reason : this.reason;
        controller.abort(err instanceof AxiosError$1 ? err : new CanceledError$1(err instanceof Error ? err.message : err));
      }
    };

    // FIX: message previously read `timeout ${timeout} of ms exceeded`
    // (garbled word order); now matches the adapter's ontimeout wording.
    let timer = timeout && setTimeout(() => {
      timer = null;
      onabort(new AxiosError$1(`timeout of ${timeout}ms exceeded`, AxiosError$1.ETIMEDOUT));
    }, timeout);

    const unsubscribe = () => {
      if (signals) {
        timer && clearTimeout(timer);
        timer = null;
        signals.forEach(signal => {
          // Cancel tokens expose unsubscribe(); DOM signals use
          // removeEventListener.
          signal.unsubscribe ? signal.unsubscribe(onabort) : signal.removeEventListener('abort', onabort);
        });
        signals = null;
      }
    };

    signals.forEach((signal) => signal.addEventListener('abort', onabort));

    const {signal} = controller;

    signal.unsubscribe = () => utils$1.asap(unsubscribe);

    return signal;
  }
};
|
|
|
|
/**
 * Split a binary chunk (anything with byteLength/slice, e.g. Uint8Array)
 * into pieces of at most `chunkSize` bytes. A chunk already smaller than
 * chunkSize is yielded as-is, without copying.
 */
const streamChunk = function* (chunk, chunkSize) {
  const len = chunk.byteLength;

  if (len < chunkSize) {
    yield chunk;
    return;
  }

  for (let pos = 0; pos < len; pos += chunkSize) {
    yield chunk.slice(pos, pos + chunkSize);
  }
};
|
|
|
|
// Normalize any stream/async-iterable of binary chunks and re-chunk its
// output to at most chunkSize bytes per piece.
const readBytes = async function* (iterable, chunkSize) {
  for await (const part of readStream(iterable)) {
    yield* streamChunk(part, chunkSize);
  }
};
|
|
|
|
/**
 * Turn a ReadableStream (or anything async-iterable) into an async generator
 * of chunks. Async iterables are delegated to directly; otherwise the stream
 * is drained via getReader(), and the reader is cancelled on exit — including
 * early break or thrown errors — so the underlying source is released.
 */
const readStream = async function* (stream) {
  if (stream[Symbol.asyncIterator]) {
    yield* stream;
    return;
  }

  const reader = stream.getReader();
  try {
    while (true) {
      const {done, value} = await reader.read();
      if (done) {
        break;
      }
      yield value;
    }
  } finally {
    await reader.cancel();
  }
};
|
|
|
|
// Wrap a (byte) stream in a new ReadableStream that re-chunks it to
// chunkSize, reports cumulative bytes via onProgress, and fires onFinish
// exactly once — on completion, error, or cancellation.
const trackStream = (stream, chunkSize, onProgress, onFinish) => {
  const iterator = readBytes(stream, chunkSize);

  let bytes = 0;
  let done;
  // Guard so onFinish runs at most once regardless of how the stream ends.
  let _onFinish = (e) => {
    if (!done) {
      done = true;
      onFinish && onFinish(e);
    }
  };

  return new ReadableStream({
    async pull(controller) {
      try {
        // Shadows the outer `done` flag intentionally — this is the
        // iterator-result flag, not the finish guard.
        const {done, value} = await iterator.next();

        if (done) {
          _onFinish();
          controller.close();
          return;
        }

        let len = value.byteLength;
        if (onProgress) {
          let loadedBytes = bytes += len;
          onProgress(loadedBytes);
        }
        controller.enqueue(new Uint8Array(value));
      } catch (err) {
        _onFinish(err);
        throw err;
      }
    },
    cancel(reason) {
      _onFinish(reason);
      // Close the underlying generator so its finally blocks run.
      return iterator.return();
    }
  }, {
    highWaterMark: 2
  })
};
|
|
|
|
// Feature detection for the fetch adapter: the full fetch API trio must
// exist, and ReadableStream additionally for streaming support.
const isFetchSupported = typeof fetch === 'function' && typeof Request === 'function' && typeof Response === 'function';
const isReadableStreamSupported = isFetchSupported && typeof ReadableStream === 'function';
|
|
|
|
// used only inside the fetch adapter
// Encode a string to UTF-8 bytes: synchronously via a shared TextEncoder
// when available, otherwise asynchronously through a Response round-trip.
// Callers must await the result (awaiting the sync value is harmless).
const encodeText = isFetchSupported && (typeof TextEncoder === 'function' ?
  ((encoder) => (str) => encoder.encode(str))(new TextEncoder()) :
  async (str) => new Uint8Array(await new Response(str).arrayBuffer())
);
|
|
|
|
// Run `fn(...args)` and report whether it returned a truthy value; any thrown
// error is treated as "feature unsupported" (false) rather than propagated.
const test = (fn, ...args) => {
  try {
    const result = fn(...args);
    return Boolean(result);
  } catch (e) {
    return false;
  }
};
|
|
|
|
// Detect streaming request-body support (fetch "duplex" requests). A
// compliant implementation reads the `duplex` option and does NOT synthesize
// a Content-Type header for a ReadableStream body.
// NOTE(review): `platform.origin` is defined elsewhere in this bundle.
const supportsRequestStream = isReadableStreamSupported && test(() => {
  let duplexAccessed = false;

  const hasContentType = new Request(platform.origin, {
    body: new ReadableStream(),
    method: 'POST',
    get duplex() {
      // Only implementations that understand duplex requests read this getter.
      duplexAccessed = true;
      return 'half';
    },
  }).headers.has('Content-Type');

  return duplexAccessed && !hasContentType;
});
|
|
|
|
// Upload/download streams are re-sliced to 64 KiB pieces for progress events.
const DEFAULT_CHUNK_SIZE = 64 * 1024;

// Response-body streaming works when Response#body is a real ReadableStream.
const supportsResponseStream = isReadableStreamSupported &&
  test(() => utils$1.isReadableStream(new Response('').body));

// Maps a responseType to a function extracting that representation from a
// Response. The remaining entries are populated just below; 'stream' is only
// pre-set when response streaming is supported.
const resolvers = {
  stream: supportsResponseStream && ((res) => res.body)
};
|
|
|
|
// Fill in the remaining resolvers from the capabilities of an actual Response
// instance; unsupported types become throwing stubs instead of being absent.
isFetchSupported && (((res) => {
  ['text', 'arrayBuffer', 'blob', 'formData', 'stream'].forEach(type => {
    !resolvers[type] && (resolvers[type] = utils$1.isFunction(res[type]) ? (res) => res[type]() :
      (_, config) => {
        throw new AxiosError$1(`Response type '${type}' is not supported`, AxiosError$1.ERR_NOT_SUPPORT, config);
      });
  });
})(new Response));
|
|
|
|
/**
 * Best-effort computation of a request body's byte length.
 *
 * Returns 0 for null/undefined; a concrete size for Blob, spec-compliant
 * FormData, ArrayBuffer(View), URLSearchParams, and string bodies; and
 * implicitly `undefined` for anything else (e.g. streams, whose length is
 * unknowable up front). The order of the checks matters.
 */
const getBodyLength = async (body) => {
  if (body == null) {
    return 0;
  }

  if(utils$1.isBlob(body)) {
    return body.size;
  }

  // Spec-compliant FormData: measure by letting fetch serialize the body.
  if(utils$1.isSpecCompliantForm(body)) {
    const _request = new Request(platform.origin, {
      method: 'POST',
      body,
    });
    return (await _request.arrayBuffer()).byteLength;
  }

  if(utils$1.isArrayBufferView(body) || utils$1.isArrayBuffer(body)) {
    return body.byteLength;
  }

  if(utils$1.isURLSearchParams(body)) {
    // Serialize to a query string and fall through to the string branch.
    body = body + '';
  }

  if(utils$1.isString(body)) {
    return (await encodeText(body)).byteLength;
  }
  // Unknown body type: resolves to undefined.
};
|
|
|
|
// Prefer an explicit Content-Length header; otherwise measure the body itself.
const resolveBodyLength = async (headers, body) => {
  const declared = utils$1.toFiniteNumber(headers.getContentLength());

  if (declared == null) {
    return getBodyLength(body);
  }
  return declared;
};
|
|
|
|
// Fetch-based adapter: translates an Axios config into a fetch() call, with
// optional upload/download progress tracking via streamed bodies.
// Falsy (not installed) when the environment lacks fetch support.
const fetchAdapter = isFetchSupported && (async (config) => {
  let {
    url,
    method,
    data,
    signal,
    cancelToken,
    timeout,
    onDownloadProgress,
    onUploadProgress,
    responseType,
    headers,
    withCredentials = 'same-origin',
    fetchOptions
  } = resolveConfig(config);

  responseType = responseType ? (responseType + '').toLowerCase() : 'text';

  // Merge the abort sources (AbortSignal, CancelToken, timeout) into one signal.
  let composedSignal = composeSignals([signal, cancelToken && cancelToken.toAbortSignal()], timeout);

  let request;

  // Detach the composed signal from its sources once the request settles.
  const unsubscribe = composedSignal && composedSignal.unsubscribe && (() => {
      composedSignal.unsubscribe();
  });

  let requestContentLength;

  try {
    // Upload progress needs a streamable request body with a known, non-zero length.
    if (
      onUploadProgress && supportsRequestStream && method !== 'get' && method !== 'head' &&
      (requestContentLength = await resolveBodyLength(headers, data)) !== 0
    ) {
      let _request = new Request(url, {
        method: 'POST',
        body: data,
        duplex: "half"
      });

      let contentTypeHeader;

      // Let fetch compute the multipart boundary, then copy it onto our headers.
      if (utils$1.isFormData(data) && (contentTypeHeader = _request.headers.get('content-type'))) {
        headers.setContentType(contentTypeHeader);
      }

      if (_request.body) {
        const [onProgress, flush] = progressEventDecorator(
          requestContentLength,
          progressEventReducer(asyncDecorator(onUploadProgress))
        );

        // Replace the body with a progress-tracking stream.
        data = trackStream(_request.body, DEFAULT_CHUNK_SIZE, onProgress, flush);
      }
    }

    // Normalize boolean withCredentials to the fetch credentials vocabulary.
    if (!utils$1.isString(withCredentials)) {
      withCredentials = withCredentials ? 'include' : 'omit';
    }

    // Cloudflare Workers throws when credentials are defined
    // see https://github.com/cloudflare/workerd/issues/902
    const isCredentialsSupported = "credentials" in Request.prototype;
    request = new Request(url, {
      ...fetchOptions,
      signal: composedSignal,
      method: method.toUpperCase(),
      headers: headers.normalize().toJSON(),
      body: data,
      duplex: "half",
      credentials: isCredentialsSupported ? withCredentials : undefined
    });

    let response = await fetch(request, fetchOptions);

    const isStreamResponse = supportsResponseStream && (responseType === 'stream' || responseType === 'response');

    // Re-wrap the response body to observe download progress and/or to defer
    // signal unsubscription until the stream has been fully consumed.
    if (supportsResponseStream && (onDownloadProgress || (isStreamResponse && unsubscribe))) {
      const options = {};

      ['status', 'statusText', 'headers'].forEach(prop => {
        options[prop] = response[prop];
      });

      const responseContentLength = utils$1.toFiniteNumber(response.headers.get('content-length'));

      const [onProgress, flush] = onDownloadProgress && progressEventDecorator(
        responseContentLength,
        progressEventReducer(asyncDecorator(onDownloadProgress), true)
      ) || [];

      response = new Response(
        trackStream(response.body, DEFAULT_CHUNK_SIZE, onProgress, () => {
          flush && flush();
          unsubscribe && unsubscribe();
        }),
        options
      );
    }

    responseType = responseType || 'text';

    // Materialize the body in the requested representation.
    let responseData = await resolvers[utils$1.findKey(resolvers, responseType) || 'text'](response, config);

    // Non-stream responses are fully read at this point; release the signal.
    !isStreamResponse && unsubscribe && unsubscribe();

    return await new Promise((resolve, reject) => {
      settle(resolve, reject, {
        data: responseData,
        headers: AxiosHeaders$1.from(response.headers),
        status: response.status,
        statusText: response.statusText,
        config,
        request
      });
    })
  } catch (err) {
    unsubscribe && unsubscribe();

    // Normalize fetch's opaque TypeError into an Axios network error.
    if (err && err.name === 'TypeError' && /Load failed|fetch/i.test(err.message)) {
      throw Object.assign(
        new AxiosError$1('Network Error', AxiosError$1.ERR_NETWORK, config, request),
        {
          cause: err.cause || err
        }
      )
    }

    throw AxiosError$1.from(err, err && err.code, config, request);
  }
});
|
|
|
|
// Registry of built-in adapters; ones unavailable in this build/environment
// are falsy and reported as such by getAdapter below.
const knownAdapters = {
  http: httpAdapter,
  xhr: xhrAdapter,
  fetch: fetchAdapter
};

// Tag each available adapter with a readable `name`/`adapterName` to aid debugging.
utils$1.forEach(knownAdapters, (fn, value) => {
  if (fn) {
    try {
      // Function#name may be non-configurable in some environments.
      Object.defineProperty(fn, 'name', {value});
    } catch (e) {
      // eslint-disable-next-line no-empty
    }
    Object.defineProperty(fn, 'adapterName', {value});
  }
});
|
|
|
|
// Format a single adapter-rejection reason as a bullet-list line.
const renderReason = function (reason) {
  return `- ${reason}`;
};
|
|
|
|
// An adapter entry is "resolved" when it is already a function (a custom
// adapter) or explicitly disabled (null/false) — i.e. not a name to look up.
const isResolvedHandle = (adapter) => utils$1.isFunction(adapter) || adapter === null || adapter === false;
|
|
|
|
// Adapter resolution: turn a config's `adapter` value (function, name, or an
// ordered list of candidates) into a concrete adapter function, or throw a
// descriptive error explaining why every candidate was rejected.
const adapters = {
  getAdapter: (adapters) => {
    adapters = utils$1.isArray(adapters) ? adapters : [adapters];

    const {length} = adapters;
    let nameOrAdapter;
    let adapter;

    // Why each falsy candidate was skipped, keyed by name or position.
    const rejectedReasons = {};

    for (let i = 0; i < length; i++) {
      nameOrAdapter = adapters[i];
      let id;

      adapter = nameOrAdapter;

      if (!isResolvedHandle(nameOrAdapter)) {
        // Look the candidate up by name, case-insensitively.
        adapter = knownAdapters[(id = String(nameOrAdapter)).toLowerCase()];

        if (adapter === undefined) {
          throw new AxiosError$1(`Unknown adapter '${id}'`);
        }
      }

      // First truthy (available) adapter wins.
      if (adapter) {
        break;
      }

      // Falsy entry: false = unsupported in this environment, null = disabled.
      rejectedReasons[id || '#' + i] = adapter;
    }

    if (!adapter) {

      const reasons = Object.entries(rejectedReasons)
        .map(([id, state]) => `adapter ${id} ` +
          (state === false ? 'is not supported by the environment' : 'is not available in the build')
        );

      let s = length ?
        (reasons.length > 1 ? 'since :\n' + reasons.map(renderReason).join('\n') : ' ' + renderReason(reasons[0])) :
        'as no adapter specified';

      throw new AxiosError$1(
        `There is no suitable adapter to dispatch the request ` + s,
        'ERR_NOT_SUPPORT'
      );
    }

    return adapter;
  },
  adapters: knownAdapters
};
|
|
|
|
/**
 * Throws a `CanceledError` if cancellation has been requested.
 *
 * Checks both cancellation mechanisms: the legacy CancelToken and the
 * standard AbortSignal.
 *
 * @param {Object} config The config that is to be used for the request
 *
 * @returns {void}
 */
function throwIfCancellationRequested(config) {
  const {cancelToken, signal} = config;

  if (cancelToken) {
    cancelToken.throwIfRequested();
  }

  if (signal && signal.aborted) {
    throw new CanceledError$1(null, config);
  }
}
|
|
|
|
/**
 * Dispatch a request to the server using the configured adapter.
 *
 * Applies request transforms, resolves the adapter, then applies response
 * transforms on both success and (non-cancel) failure paths.
 *
 * @param {object} config The config that is to be used for the request
 *
 * @returns {Promise} The Promise to be fulfilled
 */
function dispatchRequest(config) {
  throwIfCancellationRequested(config);

  config.headers = AxiosHeaders$1.from(config.headers);

  // Transform request data
  config.data = transformData.call(
    config,
    config.transformRequest
  );

  // Default body-bearing methods to form-urlencoded (without overwriting an
  // explicitly set content type — note the `false` argument).
  if (['post', 'put', 'patch'].indexOf(config.method) !== -1) {
    config.headers.setContentType('application/x-www-form-urlencoded', false);
  }

  const adapter = adapters.getAdapter(config.adapter || defaults.adapter);

  return adapter(config).then(function onAdapterResolution(response) {
    // A cancellation racing the response still surfaces as CanceledError.
    throwIfCancellationRequested(config);

    // Transform response data
    response.data = transformData.call(
      config,
      config.transformResponse,
      response
    );

    response.headers = AxiosHeaders$1.from(response.headers);

    return response;
  }, function onAdapterRejection(reason) {
    if (!isCancel$1(reason)) {
      throwIfCancellationRequested(config);

      // Transform response data
      if (reason && reason.response) {
        reason.response.data = transformData.call(
          config,
          config.transformResponse,
          reason.response
        );
        reason.response.headers = AxiosHeaders$1.from(reason.response.headers);
      }
    }

    return Promise.reject(reason);
  });
}
|
|
|
|
const VERSION$1 = "1.11.0";

// Basic type validators: e.g. validators$1.string(x) returns true when x is a
// string, otherwise a human-readable expectation ("a string") used in errors.
const validators$1 = {};

// eslint-disable-next-line func-names
['object', 'boolean', 'number', 'function', 'string', 'symbol'].forEach((type, i) => {
  validators$1[type] = function validator(thing) {
    // i < 1 only for 'object' -> "an object"; the rest read "a <type>".
    return typeof thing === type || 'a' + (i < 1 ? 'n ' : ' ') + type;
  };
});

// Tracks which deprecation warnings have already been printed (warn once each).
const deprecatedWarnings = {};
|
|
|
|
/**
 * Transitional option validator
 *
 * @param {function|boolean?} validator - set to false if the transitional option has been removed
 * @param {string?} version - deprecated version / removed since version
 * @param {string?} message - some message with additional info
 *
 * @returns {function}
 */
validators$1.transitional = function transitional(validator, version, message) {
  function formatMessage(opt, desc) {
    return '[Axios v' + VERSION$1 + '] Transitional option \'' + opt + '\'' + desc + (message ? '. ' + message : '');
  }

  // eslint-disable-next-line func-names
  return (value, opt, opts) => {
    // Removed options are hard errors regardless of the supplied value.
    if (validator === false) {
      throw new AxiosError$1(
        formatMessage(opt, ' has been removed' + (version ? ' in ' + version : '')),
        AxiosError$1.ERR_DEPRECATED
      );
    }

    // Deprecated (still working) options warn once per process.
    if (version && !deprecatedWarnings[opt]) {
      deprecatedWarnings[opt] = true;
      // eslint-disable-next-line no-console
      console.warn(
        formatMessage(
          opt,
          ' has been deprecated since v' + version + ' and will be removed in the near future'
        )
      );
    }

    return validator ? validator(value, opt, opts) : true;
  };
};
|
|
|
|
// Build a validator that always accepts the value but warns that the option
// name looks like a misspelling of `correctSpelling`.
validators$1.spelling = function spelling(correctSpelling) {
  return function warnSpelling(value, opt) {
    // eslint-disable-next-line no-console
    console.warn(`${opt} is likely a misspelling of ${correctSpelling}`);
    return true;
  };
};
|
|
|
|
/**
 * Assert object's properties type
 *
 * Validates each own key of `options` against `schema`; a validator returning
 * anything other than `true` aborts with a descriptive error. Keys missing
 * from the schema are rejected unless `allowUnknown` is exactly `true`.
 *
 * @param {object} options
 * @param {object} schema
 * @param {boolean?} allowUnknown
 *
 * @returns {object}
 */
function assertOptions(options, schema, allowUnknown) {
  if (typeof options !== 'object') {
    throw new AxiosError$1('options must be an object', AxiosError$1.ERR_BAD_OPTION_VALUE);
  }

  // Iterate keys in reverse to match the original implementation's order.
  const keys = Object.keys(options);
  for (let i = keys.length - 1; i >= 0; i--) {
    const opt = keys[i];
    const validator = schema[opt];

    if (!validator) {
      if (allowUnknown !== true) {
        throw new AxiosError$1('Unknown option ' + opt, AxiosError$1.ERR_BAD_OPTION);
      }
      continue;
    }

    const value = options[opt];
    // `undefined` values are always accepted; otherwise defer to the validator.
    const verdict = value === undefined || validator(value, opt, options);
    if (verdict !== true) {
      throw new AxiosError$1('option ' + opt + ' must be ' + verdict, AxiosError$1.ERR_BAD_OPTION_VALUE);
    }
  }
}
|
|
|
|
// Public validation facade used by the Axios class below.
const validator = {
  assertOptions,
  validators: validators$1
};

// Local alias for the type validators used in Axios#_request.
const validators = validator.validators;
|
|
|
|
/**
 * Create a new instance of Axios
 *
 * @param {Object} instanceConfig The default config for the instance
 *
 * @return {Axios} A new instance of Axios
 */
let Axios$1 = class Axios {
  constructor(instanceConfig) {
    this.defaults = instanceConfig || {};
    this.interceptors = {
      request: new InterceptorManager(),
      response: new InterceptorManager()
    };
  }

  /**
   * Dispatch a request
   *
   * On rejection, augments the error's stack with the caller's frames (which
   * async rejections otherwise lose) before rethrowing.
   *
   * @param {String|Object} configOrUrl The config specific for this request (merged with this.defaults)
   * @param {?Object} config
   *
   * @returns {Promise} The Promise to be fulfilled
   */
  async request(configOrUrl, config) {
    try {
      return await this._request(configOrUrl, config);
    } catch (err) {
      if (err instanceof Error) {
        let dummy = {};

        // Capture the caller-side stack (V8 fast path, generic fallback).
        Error.captureStackTrace ? Error.captureStackTrace(dummy) : (dummy = new Error());

        // slice off the Error: ... line
        const stack = dummy.stack ? dummy.stack.replace(/^.+\n/, '') : '';
        try {
          if (!err.stack) {
            err.stack = stack;
            // match without the 2 top stack lines
          } else if (stack && !String(err.stack).endsWith(stack.replace(/^.+\n.+\n/, ''))) {
            err.stack += '\n' + stack;
          }
        } catch (e) {
          // ignore the case where "stack" is an un-writable property
        }
      }

      throw err;
    }
  }

  // Core pipeline: normalize config, validate options, flatten headers, then
  // run request interceptors -> dispatchRequest -> response interceptors
  // (synchronously when every request interceptor opts into sync mode).
  _request(configOrUrl, config) {
    /*eslint no-param-reassign:0*/
    // Allow for axios('example/url'[, config]) a la fetch API
    if (typeof configOrUrl === 'string') {
      config = config || {};
      config.url = configOrUrl;
    } else {
      config = configOrUrl || {};
    }

    config = mergeConfig$1(this.defaults, config);

    const {transitional, paramsSerializer, headers} = config;

    if (transitional !== undefined) {
      validator.assertOptions(transitional, {
        silentJSONParsing: validators.transitional(validators.boolean),
        forcedJSONParsing: validators.transitional(validators.boolean),
        clarifyTimeoutError: validators.transitional(validators.boolean)
      }, false);
    }

    if (paramsSerializer != null) {
      if (utils$1.isFunction(paramsSerializer)) {
        // A bare function is shorthand for {serialize: fn}.
        config.paramsSerializer = {
          serialize: paramsSerializer
        };
      } else {
        validator.assertOptions(paramsSerializer, {
          encode: validators.function,
          serialize: validators.function
        }, true);
      }
    }

    // Set config.allowAbsoluteUrls
    if (config.allowAbsoluteUrls !== undefined) ; else if (this.defaults.allowAbsoluteUrls !== undefined) {
      config.allowAbsoluteUrls = this.defaults.allowAbsoluteUrls;
    } else {
      config.allowAbsoluteUrls = true;
    }

    // Warn (once per call) about common option-name misspellings.
    validator.assertOptions(config, {
      baseUrl: validators.spelling('baseURL'),
      withXsrfToken: validators.spelling('withXSRFToken')
    }, true);

    // Set config.method
    config.method = (config.method || this.defaults.method || 'get').toLowerCase();

    // Flatten headers
    let contextHeaders = headers && utils$1.merge(
      headers.common,
      headers[config.method]
    );

    // Strip the per-method buckets so only flat header entries remain.
    headers && utils$1.forEach(
      ['delete', 'get', 'head', 'post', 'put', 'patch', 'common'],
      (method) => {
        delete headers[method];
      }
    );

    config.headers = AxiosHeaders$1.concat(contextHeaders, headers);

    // filter out skipped interceptors
    const requestInterceptorChain = [];
    let synchronousRequestInterceptors = true;
    this.interceptors.request.forEach(function unshiftRequestInterceptors(interceptor) {
      if (typeof interceptor.runWhen === 'function' && interceptor.runWhen(config) === false) {
        return;
      }

      // One async interceptor forces the whole chain onto the async path.
      synchronousRequestInterceptors = synchronousRequestInterceptors && interceptor.synchronous;

      requestInterceptorChain.unshift(interceptor.fulfilled, interceptor.rejected);
    });

    const responseInterceptorChain = [];
    this.interceptors.response.forEach(function pushResponseInterceptors(interceptor) {
      responseInterceptorChain.push(interceptor.fulfilled, interceptor.rejected);
    });

    let promise;
    let i = 0;
    let len;

    // Async path: build a flat [onFulfilled, onRejected, ...] promise chain.
    if (!synchronousRequestInterceptors) {
      const chain = [dispatchRequest.bind(this), undefined];
      chain.unshift(...requestInterceptorChain);
      chain.push(...responseInterceptorChain);
      len = chain.length;

      promise = Promise.resolve(config);

      while (i < len) {
        promise = promise.then(chain[i++], chain[i++]);
      }

      return promise;
    }

    // Sync path: run request interceptors inline before dispatching.
    len = requestInterceptorChain.length;

    let newConfig = config;

    i = 0;

    while (i < len) {
      const onFulfilled = requestInterceptorChain[i++];
      const onRejected = requestInterceptorChain[i++];
      try {
        newConfig = onFulfilled(newConfig);
      } catch (error) {
        onRejected.call(this, error);
        break;
      }
    }

    try {
      promise = dispatchRequest.call(this, newConfig);
    } catch (error) {
      return Promise.reject(error);
    }

    i = 0;
    len = responseInterceptorChain.length;

    while (i < len) {
      promise = promise.then(responseInterceptorChain[i++], responseInterceptorChain[i++]);
    }

    return promise;
  }

  // Resolve the final request URL (baseURL + url + serialized params) without
  // sending a request.
  getUri(config) {
    config = mergeConfig$1(this.defaults, config);
    const fullPath = buildFullPath(config.baseURL, config.url, config.allowAbsoluteUrls);
    return buildURL(fullPath, config.params, config.paramsSerializer);
  }
};
|
|
|
|
// Provide aliases for supported request methods
// e.g. axios.get(url, config); any body data rides in on `config.data`.
utils$1.forEach(['delete', 'get', 'head', 'options'], function forEachMethodNoData(method) {
  /*eslint func-names:0*/
  Axios$1.prototype[method] = function(url, config) {
    return this.request(mergeConfig$1(config || {}, {
      method,
      url,
      data: (config || {}).data
    }));
  };
});
|
|
|
|
// Aliases for body-carrying methods: axios.post(url, data, config) plus a
// `postForm`/`putForm`/`patchForm` variant that forces multipart encoding.
utils$1.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) {
  /*eslint func-names:0*/

  function generateHTTPMethod(isForm) {
    return function httpMethod(url, data, config) {
      return this.request(mergeConfig$1(config || {}, {
        method,
        headers: isForm ? {
          'Content-Type': 'multipart/form-data'
        } : {},
        url,
        data
      }));
    };
  }

  Axios$1.prototype[method] = generateHTTPMethod();

  Axios$1.prototype[method + 'Form'] = generateHTTPMethod(true);
});
|
|
|
|
/**
 * A `CancelToken` is an object that can be used to request cancellation of an operation.
 *
 * @param {Function} executor The executor function.
 *
 * @returns {CancelToken}
 */
let CancelToken$1 = class CancelToken {
  constructor(executor) {
    if (typeof executor !== 'function') {
      throw new TypeError('executor must be a function.');
    }

    let resolvePromise;

    this.promise = new Promise(function promiseExecutor(resolve) {
      resolvePromise = resolve;
    });

    const token = this;

    // Fan the cancellation out to all subscribe()d listeners exactly once.
    // eslint-disable-next-line func-names
    this.promise.then(cancel => {
      if (!token._listeners) return;

      let i = token._listeners.length;

      while (i-- > 0) {
        token._listeners[i](cancel);
      }
      token._listeners = null;
    });

    // Override `then` so derived promises register as listeners and expose a
    // `cancel()` method for unsubscribing (avoids leaking listeners).
    // eslint-disable-next-line func-names
    this.promise.then = onfulfilled => {
      let _resolve;
      // eslint-disable-next-line func-names
      const promise = new Promise(resolve => {
        token.subscribe(resolve);
        _resolve = resolve;
      }).then(onfulfilled);

      promise.cancel = function reject() {
        token.unsubscribe(_resolve);
      };

      return promise;
    };

    executor(function cancel(message, config, request) {
      if (token.reason) {
        // Cancellation has already been requested
        return;
      }

      token.reason = new CanceledError$1(message, config, request);
      resolvePromise(token.reason);
    });
  }

  /**
   * Throws a `CanceledError` if cancellation has been requested.
   */
  throwIfRequested() {
    if (this.reason) {
      throw this.reason;
    }
  }

  /**
   * Subscribe to the cancel signal
   */

  subscribe(listener) {
    // Already cancelled: notify immediately.
    if (this.reason) {
      listener(this.reason);
      return;
    }

    if (this._listeners) {
      this._listeners.push(listener);
    } else {
      this._listeners = [listener];
    }
  }

  /**
   * Unsubscribe from the cancel signal
   */

  unsubscribe(listener) {
    if (!this._listeners) {
      return;
    }
    const index = this._listeners.indexOf(listener);
    if (index !== -1) {
      this._listeners.splice(index, 1);
    }
  }

  /**
   * Bridge this token to a standard AbortSignal. The returned signal carries
   * an `unsubscribe` method so the bridge can be detached later.
   */
  toAbortSignal() {
    const controller = new AbortController();

    const abort = (err) => {
      controller.abort(err);
    };

    this.subscribe(abort);

    controller.signal.unsubscribe = () => this.unsubscribe(abort);

    return controller.signal;
  }

  /**
   * Returns an object that contains a new `CancelToken` and a function that, when called,
   * cancels the `CancelToken`.
   */
  static source() {
    let cancel;
    const token = new CancelToken(function executor(c) {
      cancel = c;
    });
    return {
      token,
      cancel
    };
  }
};
|
|
|
|
/**
 * Syntactic sugar for invoking a function and expanding an array for arguments.
 *
 * Instead of `f.apply(null, [1, 2, 3])`, write `spread(f)([1, 2, 3])`:
 *
 * ```js
 * spread(function(x, y, z) {})([1, 2, 3]);
 * ```
 *
 * @param {Function} callback
 *
 * @returns {Function}
 */
function spread$1(callback) {
  // `apply` (rather than spread syntax) keeps support for array-like inputs.
  const wrap = (arr) => callback.apply(null, arr);
  return wrap;
}
|
|
|
|
/**
 * Determines whether the payload is an error thrown by Axios
 *
 * Checks the `isAxiosError` brand flag rather than the prototype chain, so it
 * also works for errors crossing realm or axios-version boundaries.
 *
 * @param {*} payload The value to test
 *
 * @returns {boolean} True if the payload is an error thrown by Axios, otherwise false
 */
function isAxiosError$1(payload) {
  return utils$1.isObject(payload) && (payload.isAxiosError === true);
}
|
|
|
|
// Numeric HTTP status codes keyed by name, exported as axios.HttpStatusCode.
const HttpStatusCode$1 = {
  Continue: 100,
  SwitchingProtocols: 101,
  Processing: 102,
  EarlyHints: 103,
  Ok: 200,
  Created: 201,
  Accepted: 202,
  NonAuthoritativeInformation: 203,
  NoContent: 204,
  ResetContent: 205,
  PartialContent: 206,
  MultiStatus: 207,
  AlreadyReported: 208,
  ImUsed: 226,
  MultipleChoices: 300,
  MovedPermanently: 301,
  Found: 302,
  SeeOther: 303,
  NotModified: 304,
  UseProxy: 305,
  Unused: 306,
  TemporaryRedirect: 307,
  PermanentRedirect: 308,
  BadRequest: 400,
  Unauthorized: 401,
  PaymentRequired: 402,
  Forbidden: 403,
  NotFound: 404,
  MethodNotAllowed: 405,
  NotAcceptable: 406,
  ProxyAuthenticationRequired: 407,
  RequestTimeout: 408,
  Conflict: 409,
  Gone: 410,
  LengthRequired: 411,
  PreconditionFailed: 412,
  PayloadTooLarge: 413,
  UriTooLong: 414,
  UnsupportedMediaType: 415,
  RangeNotSatisfiable: 416,
  ExpectationFailed: 417,
  ImATeapot: 418,
  MisdirectedRequest: 421,
  UnprocessableEntity: 422,
  Locked: 423,
  FailedDependency: 424,
  TooEarly: 425,
  UpgradeRequired: 426,
  PreconditionRequired: 428,
  TooManyRequests: 429,
  RequestHeaderFieldsTooLarge: 431,
  UnavailableForLegalReasons: 451,
  InternalServerError: 500,
  NotImplemented: 501,
  BadGateway: 502,
  ServiceUnavailable: 503,
  GatewayTimeout: 504,
  HttpVersionNotSupported: 505,
  VariantAlsoNegotiates: 506,
  InsufficientStorage: 507,
  LoopDetected: 508,
  NotExtended: 510,
  NetworkAuthenticationRequired: 511,
};

// Also install the reverse mapping, e.g. HttpStatusCode$1[404] === 'NotFound'.
Object.entries(HttpStatusCode$1).forEach(([key, value]) => {
  HttpStatusCode$1[value] = key;
});
|
|
|
|
/**
 * Create an instance of Axios
 *
 * The returned value is a callable (bound to Axios#request) that also carries
 * every Axios prototype method and the instance's own state copied onto it.
 *
 * @param {Object} defaultConfig The default config for the instance
 *
 * @returns {Axios} A new instance of Axios
 */
function createInstance(defaultConfig) {
  const context = new Axios$1(defaultConfig);
  const instance = bind(Axios$1.prototype.request, context);

  // Copy axios.prototype to instance
  utils$1.extend(instance, Axios$1.prototype, context, {allOwnKeys: true});

  // Copy context to instance
  utils$1.extend(instance, context, null, {allOwnKeys: true});

  // Factory for creating new instances
  instance.create = function create(instanceConfig) {
    return createInstance(mergeConfig$1(defaultConfig, instanceConfig));
  };

  return instance;
}
|
|
|
|
// Create the default instance to be exported
const axios = createInstance(defaults);

// Expose Axios class to allow class inheritance
axios.Axios = Axios$1;

// Expose Cancel & CancelToken
axios.CanceledError = CanceledError$1;
axios.CancelToken = CancelToken$1;
axios.isCancel = isCancel$1;
axios.VERSION = VERSION$1;
axios.toFormData = toFormData$1;

// Expose AxiosError class
axios.AxiosError = AxiosError$1;

// alias for CanceledError for backward compatibility
axios.Cancel = axios.CanceledError;

// Expose all/spread
axios.all = function all(promises) {
  return Promise.all(promises);
};

axios.spread = spread$1;

// Expose isAxiosError
axios.isAxiosError = isAxiosError$1;

// Expose mergeConfig
axios.mergeConfig = mergeConfig$1;

axios.AxiosHeaders = AxiosHeaders$1;

// Convert a FormData instance (or an HTML form element) to a plain object.
axios.formToJSON = thing => formDataToJSON(utils$1.isHTMLForm(thing) ? new FormData(thing) : thing);

axios.getAdapter = adapters.getAdapter;

axios.HttpStatusCode = HttpStatusCode$1;

// Interop: mirror the instance under `.default` for mixed ESM/CJS consumers.
axios.default = axios;
|
|
|
|
// This module is intended to unwrap Axios default export as named.
// Keep top-level export same with static properties
// so that it can keep same with es module or cjs
const {
  Axios,
  AxiosError,
  CanceledError,
  isCancel,
  CancelToken,
  VERSION,
  all,
  Cancel,
  isAxiosError,
  spread,
  toFormData,
  AxiosHeaders,
  HttpStatusCode,
  formToJSON,
  getAdapter,
  mergeConfig
} = axios;

// NOTE(review): `create` and `createClient` are defined earlier in this bundle.
export { AxiosError as A, __vitePreload as _, axios as a, create as b, createClient as c };
//# sourceMappingURL=vendor-utils.js.map
|