Compare commits
60 Commits
e98bebd60b
...
main
| Author | SHA1 | Date |
|---|---|---|
|
|
b8d33f4f8c | ||
|
|
f3ce7705d6 | ||
|
|
70af4962a6 | ||
|
|
8cf6f9eedd | ||
|
|
c99f2abe10 | ||
|
|
a06436baac | ||
|
|
9657a9d221 | ||
|
|
3c19dee236 | ||
|
|
b4c436f909 | ||
|
|
db6a6ac609 | ||
|
|
ef48372033 | ||
|
|
1dd216f933 | ||
|
|
ac5b23326c | ||
|
|
3912b3dd41 | ||
|
|
9332cae350 | ||
|
|
aa9813aed4 | ||
|
|
fba72692ee | ||
|
|
04789ea9a1 | ||
|
|
219680fb5e | ||
|
|
4f4559023b | ||
|
|
1f1e0756d0 | ||
|
|
9c0ffa5721 | ||
|
|
b4f4104e3e | ||
|
|
18821fd560 | ||
|
|
ea71e54058 | ||
|
|
4ed4d83257 | ||
|
|
f6b7d4ba8d | ||
|
|
a84caaa31e | ||
|
|
8bdfc85027 | ||
|
|
e84d961536 | ||
|
|
9c1552e439 | ||
|
|
802ff28754 | ||
|
|
7d919979cc | ||
|
|
784a914ebf | ||
|
|
0df13abfee | ||
|
|
cf169b6b69 | ||
|
|
25066af67c | ||
|
|
c8567da75f | ||
|
|
30ea4b6ecb | ||
|
|
cd1e395ffa | ||
|
|
8934ba1e51 | ||
|
|
ed29e7c22c | ||
|
|
39b36bf6d9 | ||
|
|
bf996749e5 | ||
|
|
db2617f37d | ||
|
|
a720b4a0aa | ||
|
|
d98fd2cd36 | ||
|
|
7c1a8fa31a | ||
|
|
6cf0bf626b | ||
|
|
b7ad978e29 | ||
|
|
42cb524cd8 | ||
|
|
a1394d4901 | ||
|
|
3068463995 | ||
|
|
97a208734e | ||
|
|
0caef0b89a | ||
|
|
9bb4ecb0dd | ||
|
|
e92f11241d | ||
|
|
a71a741f76 | ||
|
|
d1c500a677 | ||
|
|
868f46fc6e |
11
.env.example
11
.env.example
@@ -1,7 +1,4 @@
|
|||||||
# NocoDB (primary data source)
|
# NocoDB (PilgrimStats only — museum sales come from ERP API via server proxy)
|
||||||
REACT_APP_NOCODB_URL=http://localhost:8090
|
VITE_NOCODB_URL=http://localhost:8090
|
||||||
REACT_APP_NOCODB_TOKEN=your_token_here
|
VITE_NOCODB_TOKEN=your-token
|
||||||
|
VITE_NOCODB_BASE_ID=your-base-id
|
||||||
# Google Sheets (fallback if NocoDB fails)
|
|
||||||
REACT_APP_SHEETS_ID=your_spreadsheet_id_here
|
|
||||||
REACT_APP_SHEETS_NAME=Consolidated Data
|
|
||||||
|
|||||||
63
.gitea/workflows/deploy.yml
Normal file
63
.gitea/workflows/deploy.yml
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
name: Deploy HiHala Dashboard
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main, master]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
deploy:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '20'
|
||||||
|
|
||||||
|
# --- Frontend ---
|
||||||
|
- name: Build frontend
|
||||||
|
env:
|
||||||
|
VITE_NOCODB_URL: ${{ secrets.VITE_NOCODB_URL }}
|
||||||
|
VITE_NOCODB_TOKEN: ${{ secrets.VITE_NOCODB_TOKEN }}
|
||||||
|
VITE_NOCODB_BASE_ID: ${{ secrets.VITE_NOCODB_BASE_ID }}
|
||||||
|
run: |
|
||||||
|
npm ci
|
||||||
|
npm run build
|
||||||
|
|
||||||
|
- name: Deploy frontend
|
||||||
|
run: rsync -a --delete build/ /opt/apps/hihala-dashboard/build/
|
||||||
|
|
||||||
|
# --- Backend ---
|
||||||
|
- name: Deploy server
|
||||||
|
run: rsync -a --delete --exclude='.env' --exclude='node_modules' server/ /opt/apps/hihala-dashboard/server/
|
||||||
|
|
||||||
|
- name: Install server dependencies
|
||||||
|
run: cd /opt/apps/hihala-dashboard/server && npm ci
|
||||||
|
|
||||||
|
- name: Write server .env
|
||||||
|
env:
|
||||||
|
ADMIN_PIN: ${{ secrets.ADMIN_PIN }}
|
||||||
|
NOCODB_URL: ${{ secrets.VITE_NOCODB_URL }}
|
||||||
|
NOCODB_TOKEN: ${{ secrets.VITE_NOCODB_TOKEN }}
|
||||||
|
NOCODB_BASE_ID: ${{ secrets.VITE_NOCODB_BASE_ID }}
|
||||||
|
ERP_API_URL: ${{ secrets.ERP_API_URL }}
|
||||||
|
ERP_API_CODE: ${{ secrets.ERP_API_CODE }}
|
||||||
|
ERP_USERNAME: ${{ secrets.ERP_USERNAME }}
|
||||||
|
ERP_PASSWORD: ${{ secrets.ERP_PASSWORD }}
|
||||||
|
ETL_SECRET: ${{ secrets.ETL_SECRET }}
|
||||||
|
run: |
|
||||||
|
cat > /opt/apps/hihala-dashboard/server/.env << EOF
|
||||||
|
NODE_ENV=production
|
||||||
|
SERVER_PORT=3002
|
||||||
|
ADMIN_PIN=${ADMIN_PIN}
|
||||||
|
NOCODB_URL=${NOCODB_URL}
|
||||||
|
NOCODB_TOKEN=${NOCODB_TOKEN}
|
||||||
|
NOCODB_BASE_ID=${NOCODB_BASE_ID}
|
||||||
|
ERP_API_URL=${ERP_API_URL}
|
||||||
|
ERP_API_CODE=${ERP_API_CODE}
|
||||||
|
ERP_USERNAME=${ERP_USERNAME}
|
||||||
|
ERP_PASSWORD=${ERP_PASSWORD}
|
||||||
|
ETL_SECRET=${ETL_SECRET}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
- name: Restart server service
|
||||||
|
run: sudo systemctl restart hihala-dashboard.service
|
||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -2,6 +2,7 @@
|
|||||||
|
|
||||||
# dependencies
|
# dependencies
|
||||||
/node_modules
|
/node_modules
|
||||||
|
/server/node_modules
|
||||||
/.pnp
|
/.pnp
|
||||||
.pnp.js
|
.pnp.js
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,79 @@
|
|||||||
|
# Dashboard Quick & Medium Improvements
|
||||||
|
|
||||||
|
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||||
|
|
||||||
|
**Goal:** Improve reliability, performance, and code quality of the HiHala Dashboard.
|
||||||
|
|
||||||
|
**Architecture:** Focused improvements across data layer (timeout, retry), UI (error handling, loading skeletons, code splitting), config (VAT rate), and DX (TypeScript strict, dead code removal).
|
||||||
|
|
||||||
|
**Tech Stack:** React 19, Vite 7, TypeScript 5, Chart.js
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 1: Fetch Timeout + Retry Logic
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `src/services/dataService.ts`
|
||||||
|
|
||||||
|
- [ ] Add `fetchWithTimeout` wrapper (10s timeout) around all fetch calls
|
||||||
|
- [ ] Add retry with exponential backoff (3 attempts, 1s/2s/4s) to `fetchNocoDBTable` and `discoverTableIds`
|
||||||
|
- [ ] Commit
|
||||||
|
|
||||||
|
### Task 2: Friendly Error Handling
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `src/App.tsx` (error display)
|
||||||
|
- Modify: `src/services/dataService.ts` (error classification)
|
||||||
|
|
||||||
|
- [ ] Add error classification in dataService (network, auth, config, unknown)
|
||||||
|
- [ ] Replace raw error message in App.tsx with user-friendly messages using i18n keys
|
||||||
|
- [ ] Add error keys to `src/locales/en.json` and `src/locales/ar.json`
|
||||||
|
- [ ] Commit
|
||||||
|
|
||||||
|
### Task 3: Remove Dead Code
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Delete: `src/hooks/useUrlState.ts`
|
||||||
|
- Delete: `src/services/sallaService.ts`
|
||||||
|
|
||||||
|
- [ ] Delete unused files
|
||||||
|
- [ ] Verify no imports reference them
|
||||||
|
- [ ] Commit
|
||||||
|
|
||||||
|
### Task 4: Route-Based Code Splitting
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `src/App.tsx`
|
||||||
|
|
||||||
|
- [ ] Lazy-load Dashboard, Comparison, Slides with `React.lazy` + `Suspense`
|
||||||
|
- [ ] Commit
|
||||||
|
|
||||||
|
### Task 5: Loading Skeletons
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Create: `src/components/shared/LoadingSkeleton.tsx`
|
||||||
|
- Modify: `src/App.tsx` (replace spinner with skeleton)
|
||||||
|
- Modify: `src/App.css` (skeleton styles)
|
||||||
|
|
||||||
|
- [ ] Create skeleton component (stat cards + chart placeholders)
|
||||||
|
- [ ] Use as Suspense fallback and initial loading state
|
||||||
|
- [ ] Commit
|
||||||
|
|
||||||
|
### Task 6: VAT Rate from Config
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `src/services/dataService.ts`
|
||||||
|
|
||||||
|
- [ ] Extract VAT_RATE to a named constant at top of file
|
||||||
|
- [ ] Commit
|
||||||
|
|
||||||
|
### Task 7: TypeScript Strict Mode
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `tsconfig.json`
|
||||||
|
- Modify: various files as needed to fix type errors
|
||||||
|
|
||||||
|
- [ ] Enable `strict: true`, `noImplicitAny: true`, `strictNullChecks: true`
|
||||||
|
- [ ] Fix all resulting type errors
|
||||||
|
- [ ] Verify build passes
|
||||||
|
- [ ] Commit
|
||||||
647
docs/superpowers/plans/2026-03-26-erp-api-migration.md
Normal file
647
docs/superpowers/plans/2026-03-26-erp-api-migration.md
Normal file
@@ -0,0 +1,647 @@
|
|||||||
|
# ERP API Migration — Replace NocoDB Museum Data with Hono ERP API
|
||||||
|
|
||||||
|
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||||
|
|
||||||
|
**Goal:** Replace NocoDB as the museum sales data source with the Hono ERP API, keeping NocoDB only for PilgrimStats. Add "channel" as a new filterable dimension (replacing "district").
|
||||||
|
|
||||||
|
**Architecture:** The Hono ERP API returns transaction-level sales data (each sale with nested `Products[]`). We authenticate via JWT (POST `/auth/login`), then fetch by date range (GET `/api/getbydate`). Client-side code aggregates transactions into daily/museum/channel records that match the existing `MuseumRecord` shape. NocoDB remains solely for PilgrimStats.
|
||||||
|
|
||||||
|
**Tech Stack:** React 19, TypeScript (strict), Vite, Chart.js, Hono ERP REST API (Azure-hosted)
|
||||||
|
|
||||||
|
**Security note:** ERP credentials are stored as `VITE_*` env vars which get bundled into the client-side JS (same pattern as the existing NocoDB token). The ERP account (`sales_user`) is a read-only reporting account. A server-side proxy can be added later if needed.
|
||||||
|
|
||||||
|
**Compilation note:** Tasks 3–9 form an atomic migration — the codebase will not compile between them. They must be executed as a single batch on one branch. Individual commits are for traceability, not for producing intermediate working states.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## File Structure
|
||||||
|
|
||||||
|
| Action | File | Responsibility |
|
||||||
|
|--------|------|----------------|
|
||||||
|
| Create | `src/utils/fetchHelpers.ts` | Shared `fetchWithTimeout` + `fetchWithRetry` (extracted from dataService) |
|
||||||
|
| Create | `src/config/museumMapping.ts` | Product description → museum mapping + channel label mapping |
|
||||||
|
| Create | `src/services/erpService.ts` | ERP API auth, fetching, transaction → MuseumRecord aggregation |
|
||||||
|
| Modify | `src/types/index.ts` | Add `channel` to MuseumRecord, add ERP API types, remove NocoDB museum types, remove `DistrictMuseumMap` |
|
||||||
|
| Modify | `src/services/dataService.ts` | Replace NocoDB fetch with ERP fetch, replace district→channel in grouping/filter functions, remove `revenue_incl_tax` fallbacks |
|
||||||
|
| Modify | `src/components/Dashboard.tsx` | Replace district filter/chart with channel, update `filterKeys` array, update all district references |
|
||||||
|
| Modify | `src/components/Comparison.tsx` | Replace district filter with channel |
|
||||||
|
| Modify | `src/components/Slides.tsx` | Full refactor: replace `DistrictMuseumMap` prop threading (10+ call sites), `SlideConfig.district`→`channel`, update `SlideEditor`/`SlidePreview`/`PreviewMode` interfaces, update `generateSlideHTML`/`generateChartScripts` |
|
||||||
|
| Modify | `src/locales/en.json` | Replace district→channel keys, add `charts.channel`, update error messages |
|
||||||
|
| Modify | `src/locales/ar.json` | Arabic translations for all channel-related keys |
|
||||||
|
| Modify | `src/App.tsx` | Update env var check for ERP config |
|
||||||
|
| Modify | `.env.local` | Add ERP API credentials |
|
||||||
|
| Modify | `.env.example` | Update to reflect ERP as primary museum data source |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 1: Environment Configuration
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `.env.local`
|
||||||
|
- Modify: `.env.example`
|
||||||
|
|
||||||
|
- [ ] **Step 1: Add ERP env vars to `.env.local`**
|
||||||
|
|
||||||
|
```env
|
||||||
|
# Hono ERP API (museum sales data)
|
||||||
|
VITE_ERP_API_URL=<see .env.local on machine>
|
||||||
|
VITE_ERP_API_CODE=<see .env.local on machine>
|
||||||
|
VITE_ERP_USERNAME=<see .env.local on machine>
|
||||||
|
VITE_ERP_PASSWORD=<see .env.local on machine>
|
||||||
|
```
|
||||||
|
|
||||||
|
The actual values are in the Postman collection at `~/Downloads/hono-erp Copy.postman_collection.json`. Read that file for the credentials. Keep existing NocoDB vars (needed for PilgrimStats).
|
||||||
|
|
||||||
|
- [ ] **Step 2: Update `.env.example`**
|
||||||
|
|
||||||
|
Update to document both data sources:
|
||||||
|
|
||||||
|
```env
|
||||||
|
# Hono ERP API (museum sales data — primary source)
|
||||||
|
VITE_ERP_API_URL=https://hono-erp.azurewebsites.net
|
||||||
|
VITE_ERP_API_CODE=your-api-function-key
|
||||||
|
VITE_ERP_USERNAME=your-username
|
||||||
|
VITE_ERP_PASSWORD=your-password
|
||||||
|
|
||||||
|
# NocoDB (PilgrimStats only)
|
||||||
|
VITE_NOCODB_URL=http://localhost:8090
|
||||||
|
VITE_NOCODB_TOKEN=your-token
|
||||||
|
VITE_NOCODB_BASE_ID=your-base-id
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 3: Commit** (`.env.local` is gitignored — only commit `.env.example`)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add .env.example
|
||||||
|
git commit -m "feat: update env example for ERP API as primary museum data source"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 2: Extract Fetch Helpers
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Create: `src/utils/fetchHelpers.ts`
|
||||||
|
- Modify: `src/services/dataService.ts` (update imports)
|
||||||
|
|
||||||
|
- [ ] **Step 1: Extract `fetchWithTimeout` and `fetchWithRetry`**
|
||||||
|
|
||||||
|
Move these two functions from `dataService.ts` into `src/utils/fetchHelpers.ts`. Export them. Also move the constants `FETCH_TIMEOUT_MS` and `MAX_RETRIES`.
|
||||||
|
|
||||||
|
- [ ] **Step 2: Update dataService.ts imports**
|
||||||
|
|
||||||
|
Replace the function definitions with:
|
||||||
|
```typescript
|
||||||
|
import { fetchWithTimeout, fetchWithRetry } from '../utils/fetchHelpers';
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 3: Verify build still works**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 4: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add src/utils/fetchHelpers.ts src/services/dataService.ts
|
||||||
|
git commit -m "refactor: extract fetch helpers to shared util"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 3: Museum Mapping Configuration
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Create: `src/config/museumMapping.ts`
|
||||||
|
|
||||||
|
Definitive mapping of all 47 known product descriptions to museum names, plus channel label mappings.
|
||||||
|
|
||||||
|
- [ ] **Step 1: Create museum mapping file**
|
||||||
|
|
||||||
|
The mapping uses keyword matching with a **priority order** — this matters for combo tickets. Check keywords in this order (first match wins):
|
||||||
|
|
||||||
|
1. **Revelation Exhibition** — keywords: `"Revelation"`, `"الوحي"` (catches combo tickets mentioning both الوحي and القرآن الكريم)
|
||||||
|
2. **Creation Story Museum** — keywords: `"Creation Story"`, `"قصة الخلق"`
|
||||||
|
3. **Holy Quraan Museum** — keywords: `"Holy Quraan"`, `"القرآن الكريم"`
|
||||||
|
4. **Trail To Hira Cave** — keywords: `"Trail To Hira"`, `"غار حراء"`
|
||||||
|
5. **Makkah Greets Us** — keywords: `"Makkah Greets"`
|
||||||
|
6. **VIP Experience** — keywords: `"VIP Experience"`
|
||||||
|
|
||||||
|
If no match: return `"Other"`.
|
||||||
|
|
||||||
|
Channel label mapping:
|
||||||
|
```typescript
|
||||||
|
const CHANNEL_LABELS: Record<string, string> = {
|
||||||
|
'B2C': 'HiHala Website/App',
|
||||||
|
'B2B': 'B2B',
|
||||||
|
'POS': 'POS',
|
||||||
|
'Safiyyah POS': 'Safiyyah POS',
|
||||||
|
'Standalone': 'Standalone',
|
||||||
|
'Mobile': 'Mobile',
|
||||||
|
'Viva': 'Viva',
|
||||||
|
'IT': 'IT'
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
Exports:
|
||||||
|
- `getMuseumFromProduct(productDescription: string): string`
|
||||||
|
- `getChannelLabel(operatingAreaName: string): string`
|
||||||
|
- `MUSEUM_NAMES: string[]`
|
||||||
|
- `CHANNEL_LABELS: Record<string, string>`
|
||||||
|
|
||||||
|
- [ ] **Step 2: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add src/config/museumMapping.ts
|
||||||
|
git commit -m "feat: add product-to-museum and channel mapping config"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 4: TypeScript Types Update
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `src/types/index.ts`
|
||||||
|
|
||||||
|
- [ ] **Step 1: Update MuseumRecord**
|
||||||
|
|
||||||
|
Replace `district` with `channel`. Remove `museum_code` and `revenue_incl_tax` (legacy).
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export interface MuseumRecord {
|
||||||
|
date: string;
|
||||||
|
museum_name: string;
|
||||||
|
channel: string; // was: district
|
||||||
|
visits: number; // = sum of PeopleCount per product line
|
||||||
|
tickets: number; // = sum of UnitQuantity per product line
|
||||||
|
revenue_gross: number; // = sum of TotalPrice (includes VAT)
|
||||||
|
revenue_net: number; // = revenue_gross - sum of TaxAmount
|
||||||
|
year: string;
|
||||||
|
quarter: string;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 2: Add ERP API types**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export interface ERPProduct {
|
||||||
|
ProductDescription: string;
|
||||||
|
SiteDescription: string | null;
|
||||||
|
UnitQuantity: number;
|
||||||
|
PeopleCount: number;
|
||||||
|
TaxAmount: number;
|
||||||
|
TotalPrice: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ERPPayment {
|
||||||
|
PaymentMethodDescription: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ERPSaleRecord {
|
||||||
|
SaleId: number;
|
||||||
|
TransactionDate: string;
|
||||||
|
CustIdentification: string;
|
||||||
|
OperatingAreaName: string;
|
||||||
|
Payments: ERPPayment[];
|
||||||
|
Products: ERPProduct[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ERPLoginResponse {
|
||||||
|
token: string;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 3: Update Filters interface**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export interface Filters {
|
||||||
|
year: string;
|
||||||
|
channel: string; // was: district
|
||||||
|
museum: string;
|
||||||
|
quarter: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface DateRangeFilters {
|
||||||
|
channel: string; // was: district
|
||||||
|
museum: string;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 4: Remove obsolete types**
|
||||||
|
|
||||||
|
Remove: `NocoDBDistrict`, `NocoDBMuseum`, `NocoDBDailyStat`, `DistrictMuseumMap`.
|
||||||
|
|
||||||
|
Update `SlideConfig.district` → `SlideConfig.channel`.
|
||||||
|
|
||||||
|
- [ ] **Step 5: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add src/types/index.ts
|
||||||
|
git commit -m "feat: update types for ERP API — channel replaces district"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 5: ERP Service
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Create: `src/services/erpService.ts`
|
||||||
|
|
||||||
|
- [ ] **Step 1: Implement auth + fetch**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { fetchWithRetry } from '../utils/fetchHelpers';
|
||||||
|
import { getMuseumFromProduct, getChannelLabel } from '../config/museumMapping';
|
||||||
|
import type { ERPSaleRecord, ERPLoginResponse, MuseumRecord } from '../types';
|
||||||
|
|
||||||
|
const ERP_API_URL = import.meta.env.VITE_ERP_API_URL || '';
|
||||||
|
const ERP_API_CODE = import.meta.env.VITE_ERP_API_CODE || '';
|
||||||
|
const ERP_USERNAME = import.meta.env.VITE_ERP_USERNAME || '';
|
||||||
|
const ERP_PASSWORD = import.meta.env.VITE_ERP_PASSWORD || '';
|
||||||
|
|
||||||
|
let cachedToken: string | null = null;
|
||||||
|
|
||||||
|
async function login(): Promise<string> { /* POST /auth/login, cache token */ }
|
||||||
|
async function fetchSalesByDateRange(startDate: string, endDate: string): Promise<ERPSaleRecord[]> { /* GET /api/getbydate with Bearer token + code param */ }
|
||||||
|
```
|
||||||
|
|
||||||
|
Auth: token cached in module-level variable, re-login on 401.
|
||||||
|
|
||||||
|
Fetch strategy: generate month boundaries from 2024-01 to current month, fetch all in parallel with `Promise.all`.
|
||||||
|
|
||||||
|
- [ ] **Step 2: Implement aggregation function**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export function aggregateTransactions(sales: ERPSaleRecord[]): MuseumRecord[]
|
||||||
|
```
|
||||||
|
|
||||||
|
For each sale:
|
||||||
|
1. Extract date from `TransactionDate` (split on space, take first part → `"2025-01-01"`)
|
||||||
|
2. Get channel from `OperatingAreaName` via `getChannelLabel()`
|
||||||
|
3. For each product in `Products[]`:
|
||||||
|
- Get museum from `getMuseumFromProduct(product.ProductDescription)`
|
||||||
|
- Accumulate into key `${date}|${museum}|${channel}`:
|
||||||
|
- `visits += product.PeopleCount`
|
||||||
|
- `tickets += product.UnitQuantity`
|
||||||
|
- `revenue_gross += product.TotalPrice`
|
||||||
|
- `revenue_net += (product.TotalPrice - product.TaxAmount)`
|
||||||
|
4. Convert accumulated map to `MuseumRecord[]` with computed `year` and `quarter`
|
||||||
|
|
||||||
|
Negative quantities (refunds) sum correctly by default — no special handling needed.
|
||||||
|
|
||||||
|
- [ ] **Step 3: Export main entry point**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export async function fetchFromERP(): Promise<MuseumRecord[]>
|
||||||
|
```
|
||||||
|
|
||||||
|
This orchestrates: login → fetch all months → aggregate → return.
|
||||||
|
|
||||||
|
- [ ] **Step 4: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add src/services/erpService.ts
|
||||||
|
git commit -m "feat: add ERP service with auth, fetch, and aggregation"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 6: Refactor dataService.ts
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `src/services/dataService.ts`
|
||||||
|
|
||||||
|
- [ ] **Step 1: Replace NocoDB museum fetch with ERP**
|
||||||
|
|
||||||
|
- Remove: `discoverTableIds()`, `fetchNocoDBTable()`, `fetchFromNocoDB()`, `MuseumMapEntry`, NocoDB museum env var reads
|
||||||
|
- Keep: NocoDB config for PilgrimStats path only (`fetchPilgrimStats()` unchanged)
|
||||||
|
- Import `fetchFromERP` from erpService
|
||||||
|
- Update `fetchData()` and `refreshData()` to call `fetchFromERP()` instead of `fetchFromNocoDB()`
|
||||||
|
- Update config check: `if (!ERP_API_URL || !ERP_API_CODE || !ERP_USERNAME || !ERP_PASSWORD)` → throw DataError('config')
|
||||||
|
|
||||||
|
- [ ] **Step 2: Replace all district→channel in data functions**
|
||||||
|
|
||||||
|
- `filterData()`: `filters.district`→`filters.channel`, `row.district`→`row.channel`
|
||||||
|
- `filterDataByDateRange()`: same
|
||||||
|
- `groupByDistrict()`→ rename to `groupByChannel()`, change `row.district`→`row.channel`
|
||||||
|
- `getUniqueDistricts()`→ rename to `getUniqueChannels()`, change `r.district`→`r.channel`
|
||||||
|
- `getDistrictMuseumMap()`→ **delete**
|
||||||
|
- `getMuseumsForDistrict()`→ replace with `getUniqueMuseums(data: MuseumRecord[]): string[]` returning all unique museum names sorted
|
||||||
|
- Remove all `revenue_incl_tax` fallback references (e.g. `|| row.revenue_incl_tax || 0` in groupBy functions and calculateMetrics)
|
||||||
|
|
||||||
|
- [ ] **Step 3: Update type imports**
|
||||||
|
|
||||||
|
Remove unused NocoDB types from the import block. Add imports for new ERP-related types if needed.
|
||||||
|
|
||||||
|
- [ ] **Step 4: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add src/services/dataService.ts
|
||||||
|
git commit -m "refactor: replace NocoDB museum fetch with ERP API, district→channel"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 7: Update Dashboard Component
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `src/components/Dashboard.tsx`
|
||||||
|
|
||||||
|
- [ ] **Step 1: Update imports**
|
||||||
|
|
||||||
|
- `getUniqueDistricts` → `getUniqueChannels`
|
||||||
|
- `getDistrictMuseumMap` → remove
|
||||||
|
- `getMuseumsForDistrict` → `getUniqueMuseums`
|
||||||
|
- `groupByDistrict` → `groupByChannel`
|
||||||
|
|
||||||
|
- [ ] **Step 2: Update filter state and controls**
|
||||||
|
|
||||||
|
- `defaultFilters`: `district: 'all'` → `channel: 'all'`
|
||||||
|
- `filterKeys` array (line 32): `'district'` → `'channel'` — this controls URL param serialization
|
||||||
|
- All `filters.district` → `filters.channel`
|
||||||
|
- District `<select>` → Channel `<select>` with `t('filters.channel')` label
|
||||||
|
- Museum `<select>`: no longer cascaded from district/channel — just show all `getUniqueMuseums(data)`
|
||||||
|
- `availableMuseums` memo: `getMuseumsForDistrict(districtMuseumMap, filters.district)` → `getUniqueMuseums(data)`
|
||||||
|
- Remove `districtMuseumMap` memo entirely
|
||||||
|
|
||||||
|
- [ ] **Step 3: Update charts**
|
||||||
|
|
||||||
|
- `districtData` → `channelData` using `groupByChannel(filteredData, includeVAT)`
|
||||||
|
- "District Performance" → "Channel Performance" using `t('dashboard.channelPerformance')`
|
||||||
|
- Chart carousel label `t('charts.district')` → `t('charts.channel')` (line 88)
|
||||||
|
- Capture rate chart: `filters.district` → `filters.channel`, `r.district` → `r.channel`
|
||||||
|
|
||||||
|
- [ ] **Step 4: Update quarterly table**
|
||||||
|
|
||||||
|
Replace `filters.district` → `filters.channel` and `r.district` → `r.channel`
|
||||||
|
|
||||||
|
- [ ] **Step 5: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add src/components/Dashboard.tsx
|
||||||
|
git commit -m "feat: update Dashboard — channel replaces district, new channel chart"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 8: Update Comparison Component
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `src/components/Comparison.tsx`
|
||||||
|
|
||||||
|
- [ ] **Step 1: Update imports and filter references**
|
||||||
|
|
||||||
|
- Replace `getUniqueDistricts` → `getUniqueChannels`
|
||||||
|
- Remove `getDistrictMuseumMap`, `getMuseumsForDistrict`
|
||||||
|
- Add `getUniqueMuseums`
|
||||||
|
- Replace all `district` filter references with `channel` (includes URL params, `DateRangeFilters` usage, `<select>` elements)
|
||||||
|
- Museum filter: use `getUniqueMuseums(data)` (no longer cascaded)
|
||||||
|
|
||||||
|
- [ ] **Step 2: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add src/components/Comparison.tsx
|
||||||
|
git commit -m "feat: update Comparison — channel replaces district"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 9: Update Slides Component (FULL REFACTOR)
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `src/components/Slides.tsx`
|
||||||
|
|
||||||
|
This is a significant change — Slides.tsx has 30+ district references across 10+ call sites including function signatures, prop interfaces, and HTML export generation.
|
||||||
|
|
||||||
|
- [ ] **Step 1: Update imports**
|
||||||
|
|
||||||
|
- Replace `getUniqueDistricts` → `getUniqueChannels`
|
||||||
|
- Replace `getDistrictMuseumMap` → remove
|
||||||
|
- Replace `getMuseumsForDistrict` → `getUniqueMuseums`
|
||||||
|
- Remove import of `DistrictMuseumMap` type
|
||||||
|
|
||||||
|
- [ ] **Step 2: Update component interfaces**
|
||||||
|
|
||||||
|
All three interfaces pass `districts: string[]` and `districtMuseumMap: DistrictMuseumMap`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// SlideEditorProps (line 25): districts→channels, remove districtMuseumMap
|
||||||
|
interface SlideEditorProps {
|
||||||
|
slide: SlideConfig;
|
||||||
|
onUpdate: (updates: Partial<SlideConfig>) => void;
|
||||||
|
channels: string[];
|
||||||
|
museums: string[]; // flat list, independent of channel
|
||||||
|
data: MuseumRecord[];
|
||||||
|
chartTypes: ChartTypeOption[];
|
||||||
|
metrics: MetricOption[];
|
||||||
|
}
|
||||||
|
|
||||||
|
// SlidePreviewProps (line 35): same pattern
|
||||||
|
interface SlidePreviewProps {
|
||||||
|
slide: SlideConfig;
|
||||||
|
data: MuseumRecord[];
|
||||||
|
channels: string[];
|
||||||
|
museums: string[];
|
||||||
|
metrics: MetricOption[];
|
||||||
|
}
|
||||||
|
|
||||||
|
// PreviewModeProps (line 43): same pattern
|
||||||
|
interface PreviewModeProps {
|
||||||
|
slides: SlideConfig[];
|
||||||
|
data: MuseumRecord[];
|
||||||
|
channels: string[];
|
||||||
|
museums: string[];
|
||||||
|
currentSlide: number;
|
||||||
|
setCurrentSlide: React.Dispatch<React.SetStateAction<number>>;
|
||||||
|
onExit: () => void;
|
||||||
|
metrics: MetricOption[];
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 3: Update Slides() main function**
|
||||||
|
|
||||||
|
- `districts` memo → `channels` using `getUniqueChannels(data)`
|
||||||
|
- `districtMuseumMap` memo → `museums` using `getUniqueMuseums(data)`
|
||||||
|
- `defaultSlide.district: 'all'` → `channel: 'all'`
|
||||||
|
- Update all prop passing: `districts={districts} districtMuseumMap={districtMuseumMap}` → `channels={channels} museums={museums}`
|
||||||
|
|
||||||
|
- [ ] **Step 4: Update SlideEditor function**
|
||||||
|
|
||||||
|
- `getMuseumsForDistrict(districtMuseumMap, slide.district)` → just use `museums` prop directly
|
||||||
|
- Filter label: `t('filters.district')` → `t('filters.channel')`
|
||||||
|
- `<select>` for district → channel: `slide.district` → `slide.channel`, `onUpdate({ district: ... })` → `onUpdate({ channel: ... })`
|
||||||
|
- Museum select: no longer cascaded, just show all `museums`
|
||||||
|
|
||||||
|
- [ ] **Step 5: Update SlidePreview function**
|
||||||
|
|
||||||
|
- `district: slide.district` → `channel: slide.channel` in `filterDataByDateRange` calls
|
||||||
|
|
||||||
|
- [ ] **Step 6: Update generateSlideHTML and generateChartScripts**
|
||||||
|
|
||||||
|
- Function signatures: remove `districts: string[]` and `districtMuseumMap: DistrictMuseumMap` params, add `channels: string[]` and `museums: string[]`
|
||||||
|
- Internal references: `slide.district` → `slide.channel`
|
||||||
|
- `filterDataByDateRange` calls: `district:` → `channel:`
|
||||||
|
|
||||||
|
- [ ] **Step 7: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add src/components/Slides.tsx
|
||||||
|
git commit -m "feat: update Slides — full district→channel refactor across all interfaces"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 10: Update Locale Files
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `src/locales/en.json`
|
||||||
|
- Modify: `src/locales/ar.json`
|
||||||
|
|
||||||
|
- [ ] **Step 1: Update English translations**
|
||||||
|
|
||||||
|
Replace/add:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"filters": {
|
||||||
|
"channel": "Channel",
|
||||||
|
"allChannels": "All Channels"
|
||||||
|
},
|
||||||
|
"charts": {
|
||||||
|
"channel": "Channel"
|
||||||
|
},
|
||||||
|
"dashboard": {
|
||||||
|
"subtitle": "Museum analytics from Hono ERP",
|
||||||
|
"channelPerformance": "Channel Performance"
|
||||||
|
},
|
||||||
|
"errors": {
|
||||||
|
"config": "The dashboard is not configured. Please set up the ERP API connection."
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Remove: `filters.district`, `filters.allDistricts`, `charts.district`, `dashboard.districtPerformance`.
|
||||||
|
|
||||||
|
- [ ] **Step 2: Update Arabic translations**
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"filters": {
|
||||||
|
"channel": "القناة",
|
||||||
|
"allChannels": "جميع القنوات"
|
||||||
|
},
|
||||||
|
"charts": {
|
||||||
|
"channel": "القناة"
|
||||||
|
},
|
||||||
|
"dashboard": {
|
||||||
|
"subtitle": "تحليلات المتاحف من نظام Hono ERP",
|
||||||
|
"channelPerformance": "أداء القنوات"
|
||||||
|
},
|
||||||
|
"errors": {
|
||||||
|
"config": "لم يتم تهيئة لوحة المعلومات. يرجى إعداد اتصال ERP API."
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Remove: `filters.district`, `filters.allDistricts`, `charts.district`, `dashboard.districtPerformance`.
|
||||||
|
|
||||||
|
- [ ] **Step 3: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add src/locales/en.json src/locales/ar.json
|
||||||
|
git commit -m "feat: update locale files — channel replaces district, ERP error messages"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 11: Build Verification & Smoke Test
|
||||||
|
|
||||||
|
- [ ] **Step 1: Run `npm run build` to verify TypeScript compiles**
|
||||||
|
|
||||||
|
All `district` references should be gone. Any remaining will cause TS errors. Also search for `revenue_incl_tax` and `museum_code` — these should be fully removed.
|
||||||
|
|
||||||
|
- [ ] **Step 2: Run the dev server and verify**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run dev
|
||||||
|
```
|
||||||
|
|
||||||
|
Check:
|
||||||
|
1. Dashboard loads with data from ERP API (not NocoDB)
|
||||||
|
2. Channel filter shows: HiHala Website/App, B2B, POS, Safiyyah POS, Standalone, Mobile, Viva, IT
|
||||||
|
3. Museum filter shows: Revelation Exhibition, Creation Story Museum, Holy Quraan Museum, Trail To Hira Cave, Makkah Greets Us, VIP Experience
|
||||||
|
4. Museum and channel filters work independently (not cascaded)
|
||||||
|
5. All charts render: revenue trend, visitors by museum, revenue by museum, quarterly YoY, **channel performance** (was district), capture rate
|
||||||
|
6. Comparison page works with channel filter
|
||||||
|
7. Slides page works — create/preview/export with channel filter
|
||||||
|
8. PilgrimStats loads from NocoDB (capture rate chart shows pilgrim data)
|
||||||
|
9. Cache works (reload → uses cached data)
|
||||||
|
10. Offline fallback works (disconnect → shows cached data with offline badge)
|
||||||
|
|
||||||
|
- [ ] **Step 3: Commit any fixes**
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Appendix: All 46 Known Product Descriptions
|
||||||
|
|
||||||
|
For reference when building the mapping in Task 3.
|
||||||
|
|
||||||
|
**Revelation Exhibition (12 products):**
|
||||||
|
1. `Revelation Exhibition - Child`
|
||||||
|
2. `Revelation Exhibition - Groups`
|
||||||
|
3. `Revelation Exhibition - Individuals`
|
||||||
|
4. `Revelation Exhibition - POD`
|
||||||
|
5. `Revelation Exhibition and Trail To Hiraa Cave - Individuals` _(combo → Revelation)_
|
||||||
|
6. `معرض الوحي - أطفال | Revelation Exhibition - Child`
|
||||||
|
7. `معرض الوحي - أفراد | Revelation Exhibition - Individuals`
|
||||||
|
8. `معرض الوحي - المجموعات | Revelation Exhibition - Group`
|
||||||
|
9. `معرض الوحي - ذوي الإعاقة | Revelation Exhibition - POD`
|
||||||
|
10. `معرض الوحي - مجموعات| Revelation Exhibition - Groups`
|
||||||
|
11. `تذكرة دخول أفراد - معرض الوحي | متحف القرآن الكريم` _(combo → Revelation, because الوحي matched first)_
|
||||||
|
12. `تذكرة دخول مجموعات - معرض الوحي | متحف القرآن الكريم` _(combo → Revelation)_
|
||||||
|
|
||||||
|
**Creation Story Museum (21 products):**
|
||||||
|
1. `Creation Story - Groups`
|
||||||
|
2. `Creation Story - Individuals`
|
||||||
|
3. `Creation Story - Groups` _(extra space variant)_
|
||||||
|
4. `Creation Story - Indviduals - Open Date` _(typo "Indviduals" is in the source data)_
|
||||||
|
5. `Creation Story Group`
|
||||||
|
6. `Creation Story Individual`
|
||||||
|
7. `Creation Story School`
|
||||||
|
8. `متحف قصة الخلق - أفراد | Creation Story Museum - Individuals`
|
||||||
|
9. `متحف قصة الخلق - مجموعات| Creation Story Museum - Group`
|
||||||
|
10. `متحف قصة الخلق - مدرسة | Creation Story Museum - School`
|
||||||
|
11. `متحف قصة الخلق - أفراد - خصم بولمان زمزم`
|
||||||
|
12. `متحف قصة الخلق - مجموعات - خصم بولمان زمزم`
|
||||||
|
13. `تذكرة دخول متحف قصة الخلق (جامعة) | Creation Story Museum`
|
||||||
|
14. `تذكرة دخول متحف قصة الخلق مخفضة | Creation Story Museum`
|
||||||
|
15. `تذكرة دخول متحف قصة الخلق مخفضة 10 | Creation Story Museum`
|
||||||
|
16. `تذكرة دخول متحف قصة الخلق مخفضة 11.5 | Creation Story Museum`
|
||||||
|
17. `تذكرة دخول متحف قصة الخلق مخفضة 15 | Creation Story Museum`
|
||||||
|
18. `تذكرة دخول متحف قصة الخلق مخفضة 19 | Creation Story Museum`
|
||||||
|
19. `تذكرة مجانية دخول متحف قصة الخلق (ترويجية) | Creation Sto`
|
||||||
|
20. `تذكرة مجانية دخول متحف قصة الخلق (ذوي الهمم) | Creation Sto`
|
||||||
|
21. ` تذكرة مجانية دخول متحف قصة الخلق (أطفال) | Creation Story ` _(leading space)_
|
||||||
|
|
||||||
|
**Holy Quraan Museum (8 products):**
|
||||||
|
1. `Holy Quraan Museum - Child`
|
||||||
|
2. `Holy Quraan Museum - Child | متحف القرآن الكريم - أطفال`
|
||||||
|
3. `Holy Quraan Museum - Groups`
|
||||||
|
4. `Holy Quraan Museum - Groups | متحف القرآن الكريم - المجموعات`
|
||||||
|
5. `Holy Quraan Museum - Individu | متحف القرآن الكريم - أفراد`
|
||||||
|
6. `Holy Quraan Museum - Individuals`
|
||||||
|
7. `Holy Quraan Museum - POD`
|
||||||
|
8. `Holy Quraan Museum - POD | متحف القرآن الكريم - ذوي الإعاقة`
|
||||||
|
|
||||||
|
**Trail To Hira Cave (3 products):**
|
||||||
|
1. `Trail To Hira Cave - Car | غار حراء - الصعود بالسيارة`
|
||||||
|
2. `Trail To Hira Cave - Walking`
|
||||||
|
3. `Trail To Hira Cave - Walking | غار حراء - الصعود على الأقدام`
|
||||||
|
|
||||||
|
**Makkah Greets Us (1 product):**
|
||||||
|
1. `Makkah Greets us - Entry Ticket`
|
||||||
|
|
||||||
|
**VIP Experience (1 product):**
|
||||||
|
1. `VIP Experience`
|
||||||
|
|
||||||
|
**Total: 12 + 21 + 8 + 3 + 1 + 1 = 46 products**
|
||||||
242
docs/superpowers/specs/2026-03-26-etl-pipeline-design.md
Normal file
242
docs/superpowers/specs/2026-03-26-etl-pipeline-design.md
Normal file
@@ -0,0 +1,242 @@
|
|||||||
|
# ETL Pipeline: ERP → NocoDB Daily Sales
|
||||||
|
|
||||||
|
## Goal
|
||||||
|
|
||||||
|
Replace the current client-side ERP fetching (which downloads hundreds of MBs of raw transactions to the browser) with a server-side ETL pipeline that aggregates ERP data into NocoDB. The dashboard reads pre-aggregated data from NocoDB — fast and lightweight.
|
||||||
|
|
||||||
|
## Data Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
Daily (2am cron):
|
||||||
|
ERP API → Server (fetch + aggregate) → NocoDB "DailySales" table
|
||||||
|
|
||||||
|
On page load:
|
||||||
|
NocoDB "DailySales" → Dashboard client (small payload, fast)
|
||||||
|
```
|
||||||
|
|
||||||
|
## NocoDB "DailySales" Table
|
||||||
|
|
||||||
|
One row per date/museum/channel combination. Flat — no lookup tables needed.
|
||||||
|
|
||||||
|
| Column | Type | Example |
|
||||||
|
|--------|------|---------|
|
||||||
|
| Date | string | `2025-03-01` |
|
||||||
|
| MuseumName | string | `Revelation Exhibition` |
|
||||||
|
| Channel | string | `HiHala Website/App` |
|
||||||
|
| Visits | number | `702` |
|
||||||
|
| Tickets | number | `71` |
|
||||||
|
| GrossRevenue | number | `12049.00` |
|
||||||
|
| NetRevenue | number | `10477.40` |
|
||||||
|
|
||||||
|
Museums are derived from product descriptions using a priority-ordered keyword mapping (46 products → 6 museums). Channels are derived from `OperatingAreaName` with display labels (e.g. B2C → "HiHala Website/App").
|
||||||
|
|
||||||
|
## Server Architecture
|
||||||
|
|
||||||
|
### New files
|
||||||
|
|
||||||
|
| File | Responsibility |
|
||||||
|
|------|----------------|
|
||||||
|
| `server/src/config/museumMapping.ts` | Product → museum mapping, channel labels (moved from client) |
|
||||||
|
| `server/src/types.ts` | Server-side ERP types (`ERPSaleRecord`, `ERPProduct`, `ERPPayment`, `AggregatedRecord`) |
|
||||||
|
| `server/src/services/nocodbClient.ts` | NocoDB table discovery (via `process.env`, NOT `import.meta.env`) + paginated read/write |
|
||||||
|
| `server/src/services/etlSync.ts` | Orchestrate: fetch ERP → aggregate → write NocoDB |
|
||||||
|
| `server/src/routes/etl.ts` | `POST /api/etl/sync` endpoint (protected by secret token) |
|
||||||
|
|
||||||
|
### Modified files
|
||||||
|
|
||||||
|
| File | Change |
|
||||||
|
|------|--------|
|
||||||
|
| `server/src/config.ts` | Add NocoDB config (`process.env.NOCODB_*`) |
|
||||||
|
| `server/src/index.ts` | Mount ETL route |
|
||||||
|
| `server/.env` | Add `NOCODB_*` and `ETL_SECRET` vars |
|
||||||
|
| `server/.env.example` | Add `NOCODB_*` and `ETL_SECRET` placeholders |
|
||||||
|
| `src/services/dataService.ts` | Revert to NocoDB fetch with paginated reads for DailySales |
|
||||||
|
|
||||||
|
### Removed files
|
||||||
|
|
||||||
|
| File | Reason |
|
||||||
|
|------|--------|
|
||||||
|
| `server/src/routes/erp.ts` | Client no longer calls ERP directly |
|
||||||
|
| `src/services/erpService.ts` | Client no longer aggregates transactions |
|
||||||
|
| `src/config/museumMapping.ts` | Moved to server |
|
||||||
|
|
||||||
|
## ETL Sync Endpoint
|
||||||
|
|
||||||
|
```
|
||||||
|
POST /api/etl/sync?mode=full|incremental
|
||||||
|
Authorization: Bearer <ETL_SECRET>
|
||||||
|
```
|
||||||
|
|
||||||
|
Protected by a secret token (`ETL_SECRET` env var). Requests without a valid token get 401. The cron passes it: `curl -H "Authorization: Bearer $ETL_SECRET" -X POST ...`.
|
||||||
|
|
||||||
|
- **incremental** (default): fetch current month from ERP, aggregate, upsert into NocoDB. Used by daily cron.
|
||||||
|
- **full**: fetch all months from 2024-01 to now, clear and replace all NocoDB DailySales data. Used for initial setup or recovery.
|
||||||
|
|
||||||
|
### Incremental date range
|
||||||
|
|
||||||
|
The current month is defined as:
|
||||||
|
- `startDate`: `YYYY-MM-01T00:00:00` (first of current month)
|
||||||
|
- `endDate`: `YYYY-{MM+1}-01T00:00:00` (first of next month, exclusive)
|
||||||
|
|
||||||
|
This matches the convention already used in `erpService.ts` month boundary generation.
|
||||||
|
|
||||||
|
Response:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"status": "ok",
|
||||||
|
"mode": "incremental",
|
||||||
|
"transactionsFetched": 12744,
|
||||||
|
"recordsWritten": 342,
|
||||||
|
"duration": "8.2s"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Aggregation Logic
|
||||||
|
|
||||||
|
For each ERP transaction:
|
||||||
|
1. Extract date from `TransactionDate` (split on space, take first part)
|
||||||
|
2. Map `OperatingAreaName` → channel label via `getChannelLabel()`
|
||||||
|
3. For each product in `Products[]`:
|
||||||
|
- Map `ProductDescription` → museum name via `getMuseumFromProduct()` (priority-ordered keyword matching)
|
||||||
|
- Accumulate into composite key `date|museum|channel`:
|
||||||
|
- `visits += PeopleCount`
|
||||||
|
- `tickets += UnitQuantity`
|
||||||
|
- `GrossRevenue += TotalPrice`
|
||||||
|
- `NetRevenue += TotalPrice - TaxAmount`
|
||||||
|
|
||||||
|
Negative quantities (refunds) sum correctly by default.
|
||||||
|
|
||||||
|
## NocoDB Upsert Strategy
|
||||||
|
|
||||||
|
For **incremental** sync:
|
||||||
|
1. Delete all rows in DailySales where `Date` falls within the fetched month range
|
||||||
|
2. Insert the newly aggregated rows
|
||||||
|
|
||||||
|
For **full** sync:
|
||||||
|
1. Delete all rows in DailySales
|
||||||
|
2. Insert all aggregated rows
|
||||||
|
|
||||||
|
This avoids duplicate detection complexity — just replace the month's data.
|
||||||
|
|
||||||
|
### Race condition note
|
||||||
|
|
||||||
|
During the delete/insert window, dashboard reads may see incomplete data. Mitigations:
|
||||||
|
- The sync runs at 2am when traffic is minimal
|
||||||
|
- The client's localStorage cache (7-day TTL) means most page loads never hit NocoDB
|
||||||
|
- The client checks if fetched data is suspiciously small (< 10 rows) and prefers cached data over a likely-incomplete NocoDB read
|
||||||
|
- For full syncs, the window is larger (~2-5 minutes). If this becomes a problem, a shadow-table swap pattern can be added later.
|
||||||
|
|
||||||
|
## Client Changes
|
||||||
|
|
||||||
|
### dataService.ts
|
||||||
|
|
||||||
|
Revert to reading from NocoDB. The `DailySales` table is flat, so no joins needed. **Must use paginated fetch** (NocoDB defaults to 25 rows per page, max 1000). The existing `fetchNocoDBTable()` helper already handles pagination — reintroduce it.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
async function fetchFromNocoDB(): Promise<MuseumRecord[]> {
|
||||||
|
const tables = await discoverTableIds();
|
||||||
|
const rows = await fetchNocoDBTable<NocoDBDailySale>(tables['DailySales']);
|
||||||
|
return rows.map(row => ({
|
||||||
|
date: row.Date,
|
||||||
|
museum_name: row.MuseumName,
|
||||||
|
channel: row.Channel,
|
||||||
|
visits: row.Visits,
|
||||||
|
tickets: row.Tickets,
|
||||||
|
revenue_gross: row.GrossRevenue,
|
||||||
|
revenue_net: row.NetRevenue,
|
||||||
|
year: row.Date.substring(0, 4),
|
||||||
|
quarter: computeQuarter(row.Date),
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Add a `NocoDBDailySale` type to `src/types/index.ts`:
|
||||||
|
```typescript
|
||||||
|
export interface NocoDBDailySale {
|
||||||
|
Id: number;
|
||||||
|
Date: string;
|
||||||
|
MuseumName: string;
|
||||||
|
Channel: string;
|
||||||
|
Visits: number;
|
||||||
|
Tickets: number;
|
||||||
|
GrossRevenue: number;
|
||||||
|
NetRevenue: number;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
No `Districts`, `Museums`, or `DailyStats` tables needed — just `DailySales` and `PilgrimStats`.
|
||||||
|
|
||||||
|
### Suspicious data check
|
||||||
|
|
||||||
|
In `fetchData()`, if NocoDB returns fewer than 10 rows and a cache exists, prefer the cache:
|
||||||
|
```typescript
|
||||||
|
if (data.length < 10 && cached) {
|
||||||
|
console.warn('NocoDB returned suspiciously few rows, using cache');
|
||||||
|
return { data: cached.data, fromCache: true, cacheTimestamp: cached.timestamp };
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Server Environment
|
||||||
|
|
||||||
|
Add to `server/.env`:
|
||||||
|
```
|
||||||
|
NOCODB_URL=http://localhost:8090
|
||||||
|
NOCODB_TOKEN=<token>
|
||||||
|
NOCODB_BASE_ID=<base_id>
|
||||||
|
ETL_SECRET=<random-secret-for-cron>
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note:** Client `.env.local` retains its existing `VITE_NOCODB_*` vars — the client still reads NocoDB directly for both DailySales and PilgrimStats.
|
||||||
|
|
||||||
|
Update `server/.env.example` with the same keys (placeholder values).
|
||||||
|
|
||||||
|
## Server-Side Types
|
||||||
|
|
||||||
|
ERP types are re-declared in `server/src/types.ts` (not imported from the client `src/types/index.ts`):
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export interface ERPProduct {
|
||||||
|
ProductDescription: string;
|
||||||
|
SiteDescription: string | null;
|
||||||
|
UnitQuantity: number;
|
||||||
|
PeopleCount: number;
|
||||||
|
TaxAmount: number;
|
||||||
|
TotalPrice: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ERPSaleRecord {
|
||||||
|
SaleId: number;
|
||||||
|
TransactionDate: string;
|
||||||
|
CustIdentification: string;
|
||||||
|
OperatingAreaName: string;
|
||||||
|
Payments: Array<{ PaymentMethodDescription: string }>;
|
||||||
|
Products: ERPProduct[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AggregatedRecord {
|
||||||
|
Date: string;
|
||||||
|
MuseumName: string;
|
||||||
|
Channel: string;
|
||||||
|
Visits: number;
|
||||||
|
Tickets: number;
|
||||||
|
GrossRevenue: number;
|
||||||
|
NetRevenue: number;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Cron
|
||||||
|
|
||||||
|
```bash
|
||||||
|
0 2 * * * curl -s -H "Authorization: Bearer $ETL_SECRET" -X POST http://localhost:3002/api/etl/sync
|
||||||
|
```
|
||||||
|
|
||||||
|
Runs daily at 2am. The incremental mode fetches only the current month (~15-25K transactions), aggregates server-side, and writes ~300-500 rows to NocoDB.
|
||||||
|
|
||||||
|
## What's NOT Changing
|
||||||
|
|
||||||
|
- PilgrimStats still fetched from NocoDB by the client (unchanged)
|
||||||
|
- Client `.env.local` retains `VITE_NOCODB_*` vars (still needed for client reads)
|
||||||
|
- All dashboard UI components (Dashboard, Comparison) stay as-is
|
||||||
|
- Channel and museum filters stay as-is
|
||||||
|
- Cache/offline fallback logic stays as-is (enhanced with suspicious-data check)
|
||||||
|
- Dark mode, i18n, accessibility — all unchanged
|
||||||
118
docs/superpowers/specs/2026-03-31-hijri-seasons-design.md
Normal file
118
docs/superpowers/specs/2026-03-31-hijri-seasons-design.md
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
# Hijri Seasons Feature
|
||||||
|
|
||||||
|
## Goal
|
||||||
|
|
||||||
|
Add configurable hijri seasons (Ramadan, Hajj, etc.) to the dashboard as a presentation overlay. Seasons are user-defined with Gregorian date ranges (since hijri months shift ~11 days each year). They appear as filter presets, chart bands, and are managed through a settings page.
|
||||||
|
|
||||||
|
## Data Storage
|
||||||
|
|
||||||
|
New NocoDB `Seasons` table:
|
||||||
|
|
||||||
|
| Column | Type | Example |
|
||||||
|
|--------|------|---------|
|
||||||
|
| Name | string | `Ramadan` |
|
||||||
|
| HijriYear | number | `1446` |
|
||||||
|
| StartDate | string | `2025-02-28` |
|
||||||
|
| EndDate | string | `2025-03-30` |
|
||||||
|
| Color | string | `#10b981` |
|
||||||
|
|
||||||
|
Read on dashboard load alongside PilgrimStats. Written via server proxy to keep NocoDB credentials server-side.
|
||||||
|
|
||||||
|
**Loading lifecycle:** Seasons load independently of the main data fetch. A failure to load seasons degrades gracefully — seasons state defaults to `[]`, the dashboard renders normally without bands or season presets. Seasons are non-blocking and non-critical.
|
||||||
|
|
||||||
|
## Server Changes
|
||||||
|
|
||||||
|
### New files
|
||||||
|
|
||||||
|
| File | Responsibility |
|
||||||
|
|------|----------------|
|
||||||
|
| `server/src/routes/seasons.ts` | `GET /api/seasons` (read all), `POST /api/seasons` (create), `PUT /api/seasons/:id` (update), `DELETE /api/seasons/:id` (delete) |
|
||||||
|
|
||||||
|
### Modified files
|
||||||
|
|
||||||
|
| File | Change |
|
||||||
|
|------|--------|
|
||||||
|
| `server/src/index.ts` | Mount seasons routes at `/api/seasons` |
|
||||||
|
| `server/src/services/nocodbClient.ts` | Add generic CRUD helpers typed as `<T extends Record<string, unknown>>` so both ETL and seasons routes can share them without coupling |
|
||||||
|
| `vite.config.ts` | Add `/api/seasons` proxy rule **before** the catch-all `/api` rule (same pattern as `/api/erp`). Order: `/api/erp` → `/api/etl` → `/api/seasons` → `/api` |
|
||||||
|
|
||||||
|
## Client Changes
|
||||||
|
|
||||||
|
### New files
|
||||||
|
|
||||||
|
| File | Responsibility |
|
||||||
|
|------|----------------|
|
||||||
|
| `src/components/Settings.tsx` | Settings page with seasons CRUD table |
|
||||||
|
| `src/services/seasonsService.ts` | Fetch/create/update/delete seasons via server proxy |
|
||||||
|
|
||||||
|
### Modified files
|
||||||
|
|
||||||
|
| File | Change |
|
||||||
|
|------|--------|
|
||||||
|
| `src/types/index.ts` | Add `Season` interface |
|
||||||
|
| `src/App.tsx` | Add `/settings` route, nav link (both desktop and mobile bottom nav), load seasons on mount (non-blocking) |
|
||||||
|
| `src/components/Dashboard.tsx` | Add season filter dropdown, chart annotation bands |
|
||||||
|
| `src/components/Comparison.tsx` | Add season filter as period preset |
|
||||||
|
| `src/config/chartConfig.ts` | Import and register `chartjs-plugin-annotation` in the central `ChartJS.register()` call |
|
||||||
|
| `src/locales/en.json` | Settings page labels, season filter labels |
|
||||||
|
| `src/locales/ar.json` | Arabic translations |
|
||||||
|
| `package.json` | Add `chartjs-plugin-annotation` dependency |
|
||||||
|
|
||||||
|
## Season Interface
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export interface Season {
|
||||||
|
Id?: number;
|
||||||
|
Name: string;
|
||||||
|
HijriYear: number;
|
||||||
|
StartDate: string;
|
||||||
|
EndDate: string;
|
||||||
|
Color: string;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Settings Page (`/settings`)
|
||||||
|
|
||||||
|
New route accessible from the nav bar (gear icon on desktop, gear in mobile bottom nav). Contains:
|
||||||
|
|
||||||
|
- **Seasons table**: lists all defined seasons with columns: Name, Hijri Year, Start Date, End Date, Color, Actions (edit/delete)
|
||||||
|
- **Add season form**: inline row at the bottom of the table with inputs for each field + color picker + save button
|
||||||
|
- **Edit**: click a row to edit inline
|
||||||
|
- **Delete**: delete button per row with confirmation
|
||||||
|
- **No empty state needed**: just show the empty table with the add form
|
||||||
|
|
||||||
|
## Period Filter Integration
|
||||||
|
|
||||||
|
### Dashboard
|
||||||
|
|
||||||
|
Add a "Season" select in the filters section (after Quarter). Options populated from the loaded seasons list:
|
||||||
|
- `All Seasons` (default — no date filtering from season)
|
||||||
|
- `Ramadan 1446 (Feb 28 – Mar 30, 2025)`
|
||||||
|
- `Hajj 1446 (Jun 4 – Jun 9, 2025)`
|
||||||
|
- etc.
|
||||||
|
|
||||||
|
Selecting a season sets a date range filter on the data — equivalent to filtering by start/end date. This works alongside existing year/channel/event filters.
|
||||||
|
|
||||||
|
Implementation: when a season is selected, filter data to `row.date >= season.StartDate && row.date <= season.EndDate`. Store the selected season ID in state (not URL params — seasons are dynamic).
|
||||||
|
|
||||||
|
### Comparison
|
||||||
|
|
||||||
|
Seasons appear as preset period options alongside months/quarters. Selecting "Ramadan 1446" sets the period dates and auto-compares with the same season name in the previous hijri year if defined (e.g. "Ramadan 1445").
|
||||||
|
|
||||||
|
## Chart Bands (Revenue Trend)
|
||||||
|
|
||||||
|
Uses `chartjs-plugin-annotation` to draw semi-transparent vertical bands on the revenue trend chart. Must be registered in `chartConfig.ts` via `ChartJS.register(Annotation)`.
|
||||||
|
|
||||||
|
For each season whose date range overlaps the chart's visible range:
|
||||||
|
- Draw a vertical box from `season.StartDate` to `season.EndDate`
|
||||||
|
- Fill with `season.Color` at 15% opacity
|
||||||
|
- Label at the top with season name + hijri year
|
||||||
|
|
||||||
|
Only the revenue trend chart gets bands (it's the only time-series chart where seasons make visual sense).
|
||||||
|
|
||||||
|
## What's NOT Changing
|
||||||
|
|
||||||
|
- ETL pipeline unchanged — seasons are a UI/presentation concern
|
||||||
|
- NocoDB DailySales schema unchanged
|
||||||
|
- All existing filters (year, channel, event, quarter) unchanged
|
||||||
|
- Seasons don't affect data aggregation or storage
|
||||||
19
index.html
Normal file
19
index.html
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<link rel="icon" href="/favicon.ico" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||||
|
<meta name="theme-color" content="#f8fafc" />
|
||||||
|
<meta name="description" content="HiHala Data Dashboard — Event analytics, visitor tracking, and revenue insights" />
|
||||||
|
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||||
|
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||||
|
<link href="https://fonts.googleapis.com/css2?family=DM+Sans:wght@400;500;600&family=IBM+Plex+Sans+Arabic:wght@400;500;600;700&display=swap" rel="stylesheet">
|
||||||
|
<title>HiHala Data</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<noscript>You need to enable JavaScript to run this app.</noscript>
|
||||||
|
<div id="root"></div>
|
||||||
|
<script type="module" src="/src/index.tsx"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
17001
package-lock.json
generated
17001
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
39
package.json
39
package.json
@@ -2,13 +2,13 @@
|
|||||||
"name": "hihala-dashboard",
|
"name": "hihala-dashboard",
|
||||||
"version": "0.1.0",
|
"version": "0.1.0",
|
||||||
"private": true,
|
"private": true,
|
||||||
"proxy": "http://localhost:8090",
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@testing-library/dom": "^10.4.1",
|
"@testing-library/dom": "^10.4.1",
|
||||||
"@testing-library/jest-dom": "^6.9.1",
|
"@testing-library/jest-dom": "^6.9.1",
|
||||||
"@testing-library/react": "^16.3.2",
|
"@testing-library/react": "^16.3.2",
|
||||||
"@testing-library/user-event": "^13.5.0",
|
"@testing-library/user-event": "^13.5.0",
|
||||||
"chart.js": "^4.5.1",
|
"chart.js": "^4.5.1",
|
||||||
|
"chartjs-plugin-annotation": "^3.1.0",
|
||||||
"chartjs-plugin-datalabels": "^2.2.0",
|
"chartjs-plugin-datalabels": "^2.2.0",
|
||||||
"html2canvas": "^1.4.1",
|
"html2canvas": "^1.4.1",
|
||||||
"jszip": "^3.10.1",
|
"jszip": "^3.10.1",
|
||||||
@@ -16,31 +16,24 @@
|
|||||||
"react-chartjs-2": "^5.3.1",
|
"react-chartjs-2": "^5.3.1",
|
||||||
"react-dom": "^19.2.4",
|
"react-dom": "^19.2.4",
|
||||||
"react-router-dom": "^7.13.0",
|
"react-router-dom": "^7.13.0",
|
||||||
"react-scripts": "5.0.1",
|
|
||||||
"web-vitals": "^2.1.4"
|
"web-vitals": "^2.1.4"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"start": "react-scripts start",
|
"dev": "concurrently -n server,client -c blue,green \"npm run dev:server\" \"npm run dev:client\"",
|
||||||
"build": "react-scripts build",
|
"dev:client": "vite",
|
||||||
"test": "react-scripts test",
|
"dev:server": "cd server && npm run dev",
|
||||||
"eject": "react-scripts eject"
|
"start": "vite",
|
||||||
|
"build": "vite build",
|
||||||
|
"preview": "vite preview"
|
||||||
},
|
},
|
||||||
"eslintConfig": {
|
"devDependencies": {
|
||||||
"extends": [
|
"@types/node": "^25.2.0",
|
||||||
"react-app",
|
"@types/react": "^19.2.10",
|
||||||
"react-app/jest"
|
"@types/react-dom": "^19.2.3",
|
||||||
]
|
"@types/react-router-dom": "^5.3.3",
|
||||||
},
|
"@vitejs/plugin-react": "^5.1.4",
|
||||||
"browserslist": {
|
"concurrently": "^9.2.1",
|
||||||
"production": [
|
"typescript": "^5.9.3",
|
||||||
">0.2%",
|
"vite": "^7.3.1"
|
||||||
"not dead",
|
|
||||||
"not op_mini all"
|
|
||||||
],
|
|
||||||
"development": [
|
|
||||||
"last 1 chrome version",
|
|
||||||
"last 1 firefox version",
|
|
||||||
"last 1 safari version"
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,46 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8" />
|
|
||||||
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
||||||
<meta name="theme-color" content="#000000" />
|
|
||||||
<meta
|
|
||||||
name="description"
|
|
||||||
content="Web site created using create-react-app"
|
|
||||||
/>
|
|
||||||
<link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
|
|
||||||
<!--
|
|
||||||
manifest.json provides metadata used when your web app is installed on a
|
|
||||||
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
|
|
||||||
-->
|
|
||||||
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
|
|
||||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
|
||||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
|
||||||
<link href="https://fonts.googleapis.com/css2?family=DM+Sans:wght@400;500;600&family=IBM+Plex+Sans+Arabic:wght@400;500;600;700&display=swap" rel="stylesheet">
|
|
||||||
<!--
|
|
||||||
Notice the use of %PUBLIC_URL% in the tags above.
|
|
||||||
It will be replaced with the URL of the `public` folder during the build.
|
|
||||||
Only files inside the `public` folder can be referenced from the HTML.
|
|
||||||
|
|
||||||
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
|
|
||||||
work correctly both with client-side routing and a non-root public URL.
|
|
||||||
Learn how to configure a non-root public URL by running `npm run build`.
|
|
||||||
-->
|
|
||||||
<title>HiHala Data – Museums</title>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<noscript>You need to enable JavaScript to run this app.</noscript>
|
|
||||||
<div id="root"></div>
|
|
||||||
<!--
|
|
||||||
This HTML file is a template.
|
|
||||||
If you open it directly in the browser, you will see an empty page.
|
|
||||||
|
|
||||||
You can add webfonts, meta tags, or analytics to this file.
|
|
||||||
The build step will place the bundled scripts into the <body> tag.
|
|
||||||
|
|
||||||
To begin the development, run `npm start` or `yarn start`.
|
|
||||||
To create a production bundle, use `npm run build` or `yarn build`.
|
|
||||||
-->
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
278
scripts/nocodb-migrate.py
Normal file
278
scripts/nocodb-migrate.py
Normal file
@@ -0,0 +1,278 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
NocoDB Migration Script
|
||||||
|
Exports data from a source NocoDB instance and imports it into a target instance.
|
||||||
|
Handles ID remapping so FK references stay correct regardless of auto-increment offsets.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python3 scripts/nocodb-migrate.py
|
||||||
|
|
||||||
|
Configure source/target via environment variables or edit the config below.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import urllib.request
|
||||||
|
import urllib.error
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
|
# ============================================
# Configuration
# ============================================

import os  # local import: allows overriding credentials via environment variables

# Source/target connection settings. The docstring promises env-var
# configuration; honor it here. Hardcoded values remain as fallbacks for
# backward compatibility, but NOTE(review): committed API tokens should be
# rotated and supplied via NOCODB_SOURCE_TOKEN / NOCODB_TARGET_TOKEN instead.
SOURCE = {
    "url": os.environ.get("NOCODB_SOURCE_URL", "http://localhost:8090"),
    "token": os.environ.get("NOCODB_SOURCE_TOKEN", "Crn_mZnlStJ8BjB6a1fvx7_JhiEVFPPm_gI1AwVh"),
    "base_id": os.environ.get("NOCODB_SOURCE_BASE_ID", "pqbl1a3yie3inqj"),
}

TARGET = {
    "url": os.environ.get("NOCODB_TARGET_URL", "https://nocodb.cloudron.hihala.com"),
    "token": os.environ.get("NOCODB_TARGET_TOKEN", "j6DBMb9vkebA6i_tY1TtctwAToAsi_xQ3kOn9q5C"),
    "workspace_id": os.environ.get("NOCODB_TARGET_WORKSPACE_ID", "w0b7k8g8"),
}

# Tables to migrate, in order (parents before children) so that FK targets
# exist before the rows that reference them are inserted.
TABLES = [
    {
        "name": "Districts",
        "columns": [
            {"column_name": "Name", "title": "Name", "uidt": "SingleLineText", "pv": True},
            {"column_name": "Description", "title": "Description", "uidt": "LongText"},
        ],
        "fields": ["Name", "Description"],
        "fk_mappings": {},  # No FK dependencies
    },
    {
        "name": "Museums",
        "columns": [
            {"column_name": "Code", "title": "Code", "uidt": "SingleLineText", "pv": True},
            {"column_name": "Name", "title": "Name", "uidt": "SingleLineText"},
            {"column_name": "Status", "title": "Status", "uidt": "SingleLineText"},
            {"column_name": "DistrictId", "title": "DistrictId", "uidt": "Number"},
        ],
        "fields": ["Code", "Name", "Status"],
        "fk_mappings": {
            # field_name: (source_fk_column_candidates, parent_table_name)
            "DistrictId": (["DistrictId", "nc_epk____Districts_id"], "Districts"),
        },
    },
    {
        "name": "DailyStats",
        "columns": [
            {"column_name": "Date", "title": "Date", "uidt": "Date"},
            {"column_name": "Visits", "title": "Visits", "uidt": "Number"},
            {"column_name": "Tickets", "title": "Tickets", "uidt": "Number"},
            {"column_name": "GrossRevenue", "title": "GrossRevenue", "uidt": "Number"},
            {"column_name": "NetRevenue", "title": "NetRevenue", "uidt": "Decimal"},
            {"column_name": "MuseumId", "title": "MuseumId", "uidt": "Number"},
        ],
        "fields": ["Date", "Visits", "Tickets", "GrossRevenue", "NetRevenue"],
        "fk_mappings": {
            "MuseumId": (["MuseumId", "nc_epk____Museums_id"], "Museums"),
        },
    },
    {
        "name": "PilgrimStats",
        "columns": [
            {"column_name": "Year", "title": "Year", "uidt": "Number"},
            {"column_name": "Quarter", "title": "Quarter", "uidt": "SingleLineText"},
            {"column_name": "TotalPilgrims", "title": "TotalPilgrims", "uidt": "Number"},
        ],
        "fields": ["Year", "Quarter", "TotalPilgrims"],
        "fk_mappings": {},
    },
]
|
||||||
|
|
||||||
|
# ============================================
|
||||||
|
# API Helpers
|
||||||
|
# ============================================
|
||||||
|
|
||||||
|
def api_request(base_url, token, path, method="GET", data=None):
    """Perform a NocoDB REST call and return the parsed JSON response.

    Args:
        base_url: Instance root URL, e.g. "http://localhost:8090".
        token: NocoDB API token (sent as the "xc-token" header).
        path: Request path beginning with "/api/...".
        method: HTTP verb ("GET", "POST", ...).
        data: JSON-serializable request body, or None for no body.

    Returns:
        The decoded JSON response.

    Raises:
        urllib.error.HTTPError: re-raised after printing the error body,
            so callers see the failure but the server's message is logged.
    """
    url = f"{base_url}{path}"
    headers = {"xc-token": token, "Content-Type": "application/json"}
    # Compare against None, not truthiness: an empty list/dict is a valid
    # JSON body and must still be serialized rather than silently dropped.
    body = json.dumps(data).encode() if data is not None else None
    req = urllib.request.Request(url, data=body, headers=headers, method=method)
    try:
        with urllib.request.urlopen(req) as resp:
            return json.loads(resp.read().decode())
    except urllib.error.HTTPError as e:
        error_body = e.read().decode()
        print(f" ERROR {e.code}: {error_body}")
        raise
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_all_records(base_url, token, table_id, limit=1000):
    """Fetch all records from a table with pagination."""
    records = []
    page_offset = 0
    while True:
        path = f"/api/v2/tables/{table_id}/records?limit={limit}&offset={page_offset}"
        page = api_request(base_url, token, path).get("list", [])
        records.extend(page)
        # A short page means we have reached the end of the table.
        if len(page) < limit:
            return records
        page_offset += limit
|
||||||
|
|
||||||
|
|
||||||
|
def insert_records(base_url, token, table_id, records, batch_size=100):
    """Insert records in batches, return list of created IDs in order."""
    created_ids = []
    total = len(records)
    for start in range(0, total, batch_size):
        chunk = records[start:start + batch_size]
        response = api_request(
            base_url, token,
            f"/api/v2/tables/{table_id}/records",
            method="POST",
            data=chunk,
        )
        # The API may answer with a list of created rows or a single row dict.
        if isinstance(response, list):
            created_ids.extend(item["Id"] for item in response)
        elif isinstance(response, dict) and "Id" in response:
            created_ids.append(response["Id"])
        # Brief pause between batches to avoid rate limiting.
        if start + batch_size < total:
            time.sleep(0.1)
    return created_ids
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================
|
||||||
|
# Discovery
|
||||||
|
# ============================================
|
||||||
|
|
||||||
|
def discover_tables(base_url, token, base_id):
    """Get table name → table_id mapping."""
    meta = api_request(base_url, token, f"/api/v2/meta/bases/{base_id}/tables")
    mapping = {}
    for table in meta["list"]:
        mapping[table["title"]] = table["id"]
    return mapping
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================
|
||||||
|
# Migration
|
||||||
|
# ============================================
|
||||||
|
|
||||||
|
def run_migration():
    """Run the full migration: discover source tables, create the target
    base and tables, copy data with FK remapping, and print a summary.

    Returns:
        (target_base_id, target_table_ids): the new base's ID and a
        mapping of table name → table ID in the target instance.
    """
    print("=" * 60)
    print("NocoDB Migration: Source → Target")
    print("=" * 60)

    # Step 1: Discover source tables
    print("\n[1/5] Discovering source tables...")
    source_tables = discover_tables(SOURCE["url"], SOURCE["token"], SOURCE["base_id"])
    for name, tid in source_tables.items():
        print(f" {name}: {tid}")

    # Step 2: Create target base
    print("\n[2/5] Creating target base...")
    base = api_request(
        TARGET["url"], TARGET["token"],
        f"/api/v2/meta/workspaces/{TARGET['workspace_id']}/bases/",
        method="POST",
        data={"title": "HiHala Dashboard"}
    )
    target_base_id = base["id"]
    print(f" Created base: {target_base_id}")

    # Step 3: Create target tables
    print("\n[3/5] Creating target tables...")
    target_table_ids = {}
    for table_cfg in TABLES:
        name = table_cfg["name"]
        result = api_request(
            TARGET["url"], TARGET["token"],
            f"/api/v2/meta/bases/{target_base_id}/tables/",
            method="POST",
            data={
                "table_name": name,
                "title": name,
                "columns": table_cfg["columns"],
            }
        )
        target_table_ids[name] = result["id"]
        print(f" {name}: {result['id']}")

    # Step 4: Export source data and import with ID remapping
    print("\n[4/5] Migrating data...")
    # id_maps[table_name] = {old_id: new_id}
    id_maps = {}

    for table_cfg in TABLES:
        name = table_cfg["name"]
        print(f"\n --- {name} ---")

        if name not in source_tables:
            print(f" SKIP: not found in source")
            continue

        # Export from source
        source_records = fetch_all_records(SOURCE["url"], SOURCE["token"], source_tables[name])
        print(f" Exported {len(source_records)} records from source")

        if not source_records:
            id_maps[name] = {}
            continue

        # Build clean records with FK remapping
        clean_records = []
        for r in source_records:
            row = {}
            # Copy plain fields
            for field in table_cfg["fields"]:
                if field in r:
                    row[field] = r[field]

            # Remap FK fields
            for fk_field, (source_candidates, parent_table) in table_cfg["fk_mappings"].items():
                # Find the FK value from source (try multiple column name candidates)
                old_fk = None
                for candidate in source_candidates:
                    if candidate in r and r[candidate] is not None:
                        old_fk = r[candidate]
                        break

                if old_fk is not None and parent_table in id_maps:
                    new_fk = id_maps[parent_table].get(old_fk)
                    if new_fk is not None:
                        row[fk_field] = new_fk
                    else:
                        # Orphaned FK: keep the old value so the row is not lost,
                        # but flag it for manual cleanup.
                        print(f" WARNING: No mapping for {parent_table}.Id={old_fk}")
                        row[fk_field] = old_fk
                elif old_fk is not None:
                    row[fk_field] = old_fk

            clean_records.append(row)

        # Insert into target
        new_ids = insert_records(TARGET["url"], TARGET["token"], target_table_ids[name], clean_records)
        print(f" Inserted {len(new_ids)} records into target")

        # A count mismatch means the positional zip below would silently
        # truncate the ID map and corrupt FK remapping for child tables —
        # surface it loudly instead of letting it pass unnoticed.
        if len(new_ids) != len(source_records):
            print(f" WARNING: inserted {len(new_ids)} of {len(source_records)} records; "
                  f"ID mapping for {name} may be incomplete")

        # Build ID mapping (old_id → new_id) based on insertion order
        old_ids = [r["Id"] for r in source_records]
        id_maps[name] = {}
        for old_id, new_id in zip(old_ids, new_ids):
            id_maps[name][old_id] = new_id

        if id_maps[name]:
            sample = list(id_maps[name].items())[:3]
            print(f" ID mapping sample: {sample}")

    # Step 5: Summary
    print("\n" + "=" * 60)
    print("[5/5] Migration complete!")
    print("=" * 60)
    print(f"\n Target base ID: {target_base_id}")
    print(f"\n Target tables:")
    for name, tid in target_table_ids.items():
        print(f" {name}: {tid}")
    print(f"\n ID mappings:")
    for name, mapping in id_maps.items():
        print(f" {name}: {len(mapping)} records ({list(mapping.items())[:2]}...)")

    print(f"\n Add this to your Gitea secrets:")
    print(f" VITE_NOCODB_BASE_ID = {target_base_id}")
    print(f"\n The VITE_NOCODB_URL and VITE_NOCODB_TOKEN secrets should point to Cloudron.")

    return target_base_id, target_table_ids
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Surface any failure as a message plus a non-zero exit code.
    try:
        run_migration()
    except Exception as exc:
        print(f"\nFATAL: {exc}")
        sys.exit(1)
|
||||||
11
server/.env
Normal file
11
server/.env
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
# Salla OAuth Credentials — WARNING: live secrets are committed here; rotate them and keep server/.env out of version control
|
||||||
|
SALLA_CLIENT_ID=f1544fc8-115d-4481-bcb8-ac0244b4b726
|
||||||
|
SALLA_CLIENT_SECRET=c6fdbccb440e5d29638f830aaa16370515536f2d29e2ebdacbf3f3f60188f5c7
|
||||||
|
SALLA_REDIRECT_URI=http://localhost:3001/auth/callback
|
||||||
|
|
||||||
|
# Server port
|
||||||
|
SALLA_SERVER_PORT=3001
|
||||||
|
|
||||||
|
# After OAuth, these will be populated automatically
|
||||||
|
# SALLA_ACCESS_TOKEN=
|
||||||
|
# SALLA_REFRESH_TOKEN=
|
||||||
20
server/.env.example
Normal file
20
server/.env.example
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# Server
|
||||||
|
SERVER_PORT=3002
|
||||||
|
|
||||||
|
# Hono ERP API (museum sales data)
|
||||||
|
ERP_API_URL=https://hono-erp.azurewebsites.net
|
||||||
|
ERP_API_CODE=your-api-function-key
|
||||||
|
ERP_USERNAME=your-username
|
||||||
|
ERP_PASSWORD=your-password
|
||||||
|
|
||||||
|
# NocoDB (for ETL writes)
|
||||||
|
NOCODB_URL=http://localhost:8090
|
||||||
|
NOCODB_TOKEN=your-token
|
||||||
|
NOCODB_BASE_ID=your-base-id
|
||||||
|
|
||||||
|
# ETL sync secret (for cron auth)
|
||||||
|
ETL_SECRET=your-secret-here
|
||||||
|
|
||||||
|
# Auth
|
||||||
|
ADMIN_PIN=your-pin-code
|
||||||
|
SESSION_SECRET=your-random-session-secret
|
||||||
1
server/node_modules/.bin/mime
generated
vendored
Symbolic link
1
server/node_modules/.bin/mime
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../mime/cli.js
|
||||||
968
server/node_modules/.package-lock.json
generated
vendored
Normal file
968
server/node_modules/.package-lock.json
generated
vendored
Normal file
@@ -0,0 +1,968 @@
|
|||||||
|
{
|
||||||
|
"name": "hihala-salla-server",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"lockfileVersion": 3,
|
||||||
|
"requires": true,
|
||||||
|
"packages": {
|
||||||
|
"node_modules/accepts": {
|
||||||
|
"version": "1.3.8",
|
||||||
|
"resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
|
||||||
|
"integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"mime-types": "~2.1.34",
|
||||||
|
"negotiator": "0.6.3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/array-flatten": {
|
||||||
|
"version": "1.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
|
||||||
|
"integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/asynckit": {
|
||||||
|
"version": "0.4.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||||
|
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/axios": {
|
||||||
|
"version": "1.13.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz",
|
||||||
|
"integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"follow-redirects": "^1.15.11",
|
||||||
|
"form-data": "^4.0.5",
|
||||||
|
"proxy-from-env": "^1.1.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/body-parser": {
|
||||||
|
"version": "1.20.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz",
|
||||||
|
"integrity": "sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"bytes": "~3.1.2",
|
||||||
|
"content-type": "~1.0.5",
|
||||||
|
"debug": "2.6.9",
|
||||||
|
"depd": "2.0.0",
|
||||||
|
"destroy": "~1.2.0",
|
||||||
|
"http-errors": "~2.0.1",
|
||||||
|
"iconv-lite": "~0.4.24",
|
||||||
|
"on-finished": "~2.4.1",
|
||||||
|
"qs": "~6.14.0",
|
||||||
|
"raw-body": "~2.5.3",
|
||||||
|
"type-is": "~1.6.18",
|
||||||
|
"unpipe": "~1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8",
|
||||||
|
"npm": "1.2.8000 || >= 1.4.16"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/bytes": {
|
||||||
|
"version": "3.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
|
||||||
|
"integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/call-bind-apply-helpers": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
|
||||||
|
"integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"function-bind": "^1.1.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/call-bound": {
|
||||||
|
"version": "1.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz",
|
||||||
|
"integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"call-bind-apply-helpers": "^1.0.2",
|
||||||
|
"get-intrinsic": "^1.3.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/combined-stream": {
|
||||||
|
"version": "1.0.8",
|
||||||
|
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||||
|
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"delayed-stream": "~1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/content-disposition": {
|
||||||
|
"version": "0.5.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
|
||||||
|
"integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"safe-buffer": "5.2.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/content-type": {
|
||||||
|
"version": "1.0.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
|
||||||
|
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/cookie": {
|
||||||
|
"version": "0.7.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz",
|
||||||
|
"integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/cookie-signature": {
|
||||||
|
"version": "1.0.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.7.tgz",
|
||||||
|
"integrity": "sha512-NXdYc3dLr47pBkpUCHtKSwIOQXLVn8dZEuywboCOJY/osA0wFSLlSawr3KN8qXJEyX66FcONTH8EIlVuK0yyFA==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/cors": {
|
||||||
|
"version": "2.8.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/cors/-/cors-2.8.6.tgz",
|
||||||
|
"integrity": "sha512-tJtZBBHA6vjIAaF6EnIaq6laBBP9aq/Y3ouVJjEfoHbRBcHBAHYcMh/w8LDrk2PvIMMq8gmopa5D4V8RmbrxGw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"object-assign": "^4",
|
||||||
|
"vary": "^1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.10"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "opencollective",
|
||||||
|
"url": "https://opencollective.com/express"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/debug": {
|
||||||
|
"version": "2.6.9",
|
||||||
|
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||||
|
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"ms": "2.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/delayed-stream": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
||||||
|
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.4.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/depd": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/destroy": {
|
||||||
|
"version": "1.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
|
||||||
|
"integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8",
|
||||||
|
"npm": "1.2.8000 || >= 1.4.16"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/dotenv": {
|
||||||
|
"version": "16.6.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz",
|
||||||
|
"integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==",
|
||||||
|
"license": "BSD-2-Clause",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://dotenvx.com"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/dunder-proto": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"call-bind-apply-helpers": "^1.0.1",
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"gopd": "^1.2.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/ee-first": {
|
||||||
|
"version": "1.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
|
||||||
|
"integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/encodeurl": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/es-define-property": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/es-errors": {
|
||||||
|
"version": "1.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
|
||||||
|
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/es-object-atoms": {
|
||||||
|
"version": "1.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
|
||||||
|
"integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"es-errors": "^1.3.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/es-set-tostringtag": {
|
||||||
|
"version": "2.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
|
||||||
|
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"get-intrinsic": "^1.2.6",
|
||||||
|
"has-tostringtag": "^1.0.2",
|
||||||
|
"hasown": "^2.0.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/escape-html": {
|
||||||
|
"version": "1.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
|
||||||
|
"integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/etag": {
|
||||||
|
"version": "1.8.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
|
||||||
|
"integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/express": {
|
||||||
|
"version": "4.22.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/express/-/express-4.22.1.tgz",
|
||||||
|
"integrity": "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"accepts": "~1.3.8",
|
||||||
|
"array-flatten": "1.1.1",
|
||||||
|
"body-parser": "~1.20.3",
|
||||||
|
"content-disposition": "~0.5.4",
|
||||||
|
"content-type": "~1.0.4",
|
||||||
|
"cookie": "~0.7.1",
|
||||||
|
"cookie-signature": "~1.0.6",
|
||||||
|
"debug": "2.6.9",
|
||||||
|
"depd": "2.0.0",
|
||||||
|
"encodeurl": "~2.0.0",
|
||||||
|
"escape-html": "~1.0.3",
|
||||||
|
"etag": "~1.8.1",
|
||||||
|
"finalhandler": "~1.3.1",
|
||||||
|
"fresh": "~0.5.2",
|
||||||
|
"http-errors": "~2.0.0",
|
||||||
|
"merge-descriptors": "1.0.3",
|
||||||
|
"methods": "~1.1.2",
|
||||||
|
"on-finished": "~2.4.1",
|
||||||
|
"parseurl": "~1.3.3",
|
||||||
|
"path-to-regexp": "~0.1.12",
|
||||||
|
"proxy-addr": "~2.0.7",
|
||||||
|
"qs": "~6.14.0",
|
||||||
|
"range-parser": "~1.2.1",
|
||||||
|
"safe-buffer": "5.2.1",
|
||||||
|
"send": "~0.19.0",
|
||||||
|
"serve-static": "~1.16.2",
|
||||||
|
"setprototypeof": "1.2.0",
|
||||||
|
"statuses": "~2.0.1",
|
||||||
|
"type-is": "~1.6.18",
|
||||||
|
"utils-merge": "1.0.1",
|
||||||
|
"vary": "~1.1.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.10.0"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "opencollective",
|
||||||
|
"url": "https://opencollective.com/express"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/finalhandler": {
|
||||||
|
"version": "1.3.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.2.tgz",
|
||||||
|
"integrity": "sha512-aA4RyPcd3badbdABGDuTXCMTtOneUCAYH/gxoYRTZlIJdF0YPWuGqiAsIrhNnnqdXGswYk6dGujem4w80UJFhg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"debug": "2.6.9",
|
||||||
|
"encodeurl": "~2.0.0",
|
||||||
|
"escape-html": "~1.0.3",
|
||||||
|
"on-finished": "~2.4.1",
|
||||||
|
"parseurl": "~1.3.3",
|
||||||
|
"statuses": "~2.0.2",
|
||||||
|
"unpipe": "~1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/follow-redirects": {
|
||||||
|
"version": "1.15.11",
|
||||||
|
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
|
||||||
|
"integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==",
|
||||||
|
"funding": [
|
||||||
|
{
|
||||||
|
"type": "individual",
|
||||||
|
"url": "https://github.com/sponsors/RubenVerborgh"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=4.0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"debug": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/form-data": {
|
||||||
|
"version": "4.0.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz",
|
||||||
|
"integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"asynckit": "^0.4.0",
|
||||||
|
"combined-stream": "^1.0.8",
|
||||||
|
"es-set-tostringtag": "^2.1.0",
|
||||||
|
"hasown": "^2.0.2",
|
||||||
|
"mime-types": "^2.1.12"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/forwarded": {
|
||||||
|
"version": "0.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
|
||||||
|
"integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/fresh": {
|
||||||
|
"version": "0.5.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
|
||||||
|
"integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/function-bind": {
|
||||||
|
"version": "1.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
|
||||||
|
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/get-intrinsic": {
|
||||||
|
"version": "1.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
|
||||||
|
"integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"call-bind-apply-helpers": "^1.0.2",
|
||||||
|
"es-define-property": "^1.0.1",
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"es-object-atoms": "^1.1.1",
|
||||||
|
"function-bind": "^1.1.2",
|
||||||
|
"get-proto": "^1.0.1",
|
||||||
|
"gopd": "^1.2.0",
|
||||||
|
"has-symbols": "^1.1.0",
|
||||||
|
"hasown": "^2.0.2",
|
||||||
|
"math-intrinsics": "^1.1.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/get-proto": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"dunder-proto": "^1.0.1",
|
||||||
|
"es-object-atoms": "^1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/gopd": {
|
||||||
|
"version": "1.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
|
||||||
|
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/has-symbols": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/has-tostringtag": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
|
||||||
|
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"has-symbols": "^1.0.3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/hasown": {
|
||||||
|
"version": "2.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
|
||||||
|
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"function-bind": "^1.1.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/http-errors": {
|
||||||
|
"version": "2.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz",
|
||||||
|
"integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"depd": "~2.0.0",
|
||||||
|
"inherits": "~2.0.4",
|
||||||
|
"setprototypeof": "~1.2.0",
|
||||||
|
"statuses": "~2.0.2",
|
||||||
|
"toidentifier": "~1.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "opencollective",
|
||||||
|
"url": "https://opencollective.com/express"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/iconv-lite": {
|
||||||
|
"version": "0.4.24",
|
||||||
|
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||||
|
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"safer-buffer": ">= 2.1.2 < 3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/inherits": {
|
||||||
|
"version": "2.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
|
||||||
|
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
|
||||||
|
"license": "ISC"
|
||||||
|
},
|
||||||
|
"node_modules/ipaddr.js": {
|
||||||
|
"version": "1.9.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
|
||||||
|
"integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.10"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/math-intrinsics": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/media-typer": {
|
||||||
|
"version": "0.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
|
||||||
|
"integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/merge-descriptors": {
|
||||||
|
"version": "1.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz",
|
||||||
|
"integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/methods": {
|
||||||
|
"version": "1.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
|
||||||
|
"integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mime": {
|
||||||
|
"version": "1.6.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
|
||||||
|
"integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"bin": {
|
||||||
|
"mime": "cli.js"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mime-db": {
|
||||||
|
"version": "1.52.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||||
|
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mime-types": {
|
||||||
|
"version": "2.1.35",
|
||||||
|
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||||
|
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"mime-db": "1.52.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/ms": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/negotiator": {
|
||||||
|
"version": "0.6.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz",
|
||||||
|
"integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/object-assign": {
|
||||||
|
"version": "4.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
|
||||||
|
"integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/object-inspect": {
|
||||||
|
"version": "1.13.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz",
|
||||||
|
"integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/on-finished": {
|
||||||
|
"version": "2.4.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz",
|
||||||
|
"integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"ee-first": "1.1.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/parseurl": {
|
||||||
|
"version": "1.3.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
|
||||||
|
"integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/path-to-regexp": {
|
||||||
|
"version": "0.1.12",
|
||||||
|
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz",
|
||||||
|
"integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/proxy-addr": {
|
||||||
|
"version": "2.0.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
|
||||||
|
"integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"forwarded": "0.2.0",
|
||||||
|
"ipaddr.js": "1.9.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.10"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/proxy-from-env": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/qs": {
|
||||||
|
"version": "6.14.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.2.tgz",
|
||||||
|
"integrity": "sha512-V/yCWTTF7VJ9hIh18Ugr2zhJMP01MY7c5kh4J870L7imm6/DIzBsNLTXzMwUA3yZ5b/KBqLx8Kp3uRvd7xSe3Q==",
|
||||||
|
"license": "BSD-3-Clause",
|
||||||
|
"dependencies": {
|
||||||
|
"side-channel": "^1.1.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.6"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/range-parser": {
|
||||||
|
"version": "1.2.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz",
|
||||||
|
"integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/raw-body": {
|
||||||
|
"version": "2.5.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.3.tgz",
|
||||||
|
"integrity": "sha512-s4VSOf6yN0rvbRZGxs8Om5CWj6seneMwK3oDb4lWDH0UPhWcxwOWw5+qk24bxq87szX1ydrwylIOp2uG1ojUpA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"bytes": "~3.1.2",
|
||||||
|
"http-errors": "~2.0.1",
|
||||||
|
"iconv-lite": "~0.4.24",
|
||||||
|
"unpipe": "~1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/safe-buffer": {
|
||||||
|
"version": "5.2.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||||
|
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
|
||||||
|
"funding": [
|
||||||
|
{
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/feross"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "patreon",
|
||||||
|
"url": "https://www.patreon.com/feross"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "consulting",
|
||||||
|
"url": "https://feross.org/support"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/safer-buffer": {
|
||||||
|
"version": "2.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
|
||||||
|
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/send": {
|
||||||
|
"version": "0.19.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/send/-/send-0.19.2.tgz",
|
||||||
|
"integrity": "sha512-VMbMxbDeehAxpOtWJXlcUS5E8iXh6QmN+BkRX1GARS3wRaXEEgzCcB10gTQazO42tpNIya8xIyNx8fll1OFPrg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"debug": "2.6.9",
|
||||||
|
"depd": "2.0.0",
|
||||||
|
"destroy": "1.2.0",
|
||||||
|
"encodeurl": "~2.0.0",
|
||||||
|
"escape-html": "~1.0.3",
|
||||||
|
"etag": "~1.8.1",
|
||||||
|
"fresh": "~0.5.2",
|
||||||
|
"http-errors": "~2.0.1",
|
||||||
|
"mime": "1.6.0",
|
||||||
|
"ms": "2.1.3",
|
||||||
|
"on-finished": "~2.4.1",
|
||||||
|
"range-parser": "~1.2.1",
|
||||||
|
"statuses": "~2.0.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/send/node_modules/ms": {
|
||||||
|
"version": "2.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||||
|
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/serve-static": {
|
||||||
|
"version": "1.16.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.3.tgz",
|
||||||
|
"integrity": "sha512-x0RTqQel6g5SY7Lg6ZreMmsOzncHFU7nhnRWkKgWuMTu5NN0DR5oruckMqRvacAN9d5w6ARnRBXl9xhDCgfMeA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"encodeurl": "~2.0.0",
|
||||||
|
"escape-html": "~1.0.3",
|
||||||
|
"parseurl": "~1.3.3",
|
||||||
|
"send": "~0.19.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/setprototypeof": {
|
||||||
|
"version": "1.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
|
||||||
|
"integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==",
|
||||||
|
"license": "ISC"
|
||||||
|
},
|
||||||
|
"node_modules/side-channel": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"object-inspect": "^1.13.3",
|
||||||
|
"side-channel-list": "^1.0.0",
|
||||||
|
"side-channel-map": "^1.0.1",
|
||||||
|
"side-channel-weakmap": "^1.0.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/side-channel-list": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz",
|
||||||
|
"integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"object-inspect": "^1.13.3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/side-channel-map": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"call-bound": "^1.0.2",
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"get-intrinsic": "^1.2.5",
|
||||||
|
"object-inspect": "^1.13.3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/side-channel-weakmap": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz",
|
||||||
|
"integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"call-bound": "^1.0.2",
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"get-intrinsic": "^1.2.5",
|
||||||
|
"object-inspect": "^1.13.3",
|
||||||
|
"side-channel-map": "^1.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/statuses": {
|
||||||
|
"version": "2.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
|
||||||
|
"integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/toidentifier": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/type-is": {
|
||||||
|
"version": "1.6.18",
|
||||||
|
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
|
||||||
|
"integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"media-typer": "0.3.0",
|
||||||
|
"mime-types": "~2.1.24"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/unpipe": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
|
||||||
|
"integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/utils-merge": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/vary": {
|
||||||
|
"version": "1.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
|
||||||
|
"integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
243
server/node_modules/accepts/HISTORY.md
generated
vendored
Normal file
243
server/node_modules/accepts/HISTORY.md
generated
vendored
Normal file
@@ -0,0 +1,243 @@
|
|||||||
|
1.3.8 / 2022-02-02
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.34
|
||||||
|
- deps: mime-db@~1.51.0
|
||||||
|
* deps: negotiator@0.6.3
|
||||||
|
|
||||||
|
1.3.7 / 2019-04-29
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.6.2
|
||||||
|
- Fix sorting charset, encoding, and language with extra parameters
|
||||||
|
|
||||||
|
1.3.6 / 2019-04-28
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.24
|
||||||
|
- deps: mime-db@~1.40.0
|
||||||
|
|
||||||
|
1.3.5 / 2018-02-28
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.18
|
||||||
|
- deps: mime-db@~1.33.0
|
||||||
|
|
||||||
|
1.3.4 / 2017-08-22
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.16
|
||||||
|
- deps: mime-db@~1.29.0
|
||||||
|
|
||||||
|
1.3.3 / 2016-05-02
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.11
|
||||||
|
- deps: mime-db@~1.23.0
|
||||||
|
* deps: negotiator@0.6.1
|
||||||
|
- perf: improve `Accept` parsing speed
|
||||||
|
- perf: improve `Accept-Charset` parsing speed
|
||||||
|
- perf: improve `Accept-Encoding` parsing speed
|
||||||
|
- perf: improve `Accept-Language` parsing speed
|
||||||
|
|
||||||
|
1.3.2 / 2016-03-08
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.10
|
||||||
|
- Fix extension of `application/dash+xml`
|
||||||
|
- Update primary extension for `audio/mp4`
|
||||||
|
- deps: mime-db@~1.22.0
|
||||||
|
|
||||||
|
1.3.1 / 2016-01-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.9
|
||||||
|
- deps: mime-db@~1.21.0
|
||||||
|
|
||||||
|
1.3.0 / 2015-09-29
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.7
|
||||||
|
- deps: mime-db@~1.19.0
|
||||||
|
* deps: negotiator@0.6.0
|
||||||
|
- Fix including type extensions in parameters in `Accept` parsing
|
||||||
|
- Fix parsing `Accept` parameters with quoted equals
|
||||||
|
- Fix parsing `Accept` parameters with quoted semicolons
|
||||||
|
- Lazy-load modules from main entry point
|
||||||
|
- perf: delay type concatenation until needed
|
||||||
|
- perf: enable strict mode
|
||||||
|
- perf: hoist regular expressions
|
||||||
|
- perf: remove closures getting spec properties
|
||||||
|
- perf: remove a closure from media type parsing
|
||||||
|
- perf: remove property delete from media type parsing
|
||||||
|
|
||||||
|
1.2.13 / 2015-09-06
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.6
|
||||||
|
- deps: mime-db@~1.18.0
|
||||||
|
|
||||||
|
1.2.12 / 2015-07-30
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.4
|
||||||
|
- deps: mime-db@~1.16.0
|
||||||
|
|
||||||
|
1.2.11 / 2015-07-16
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.3
|
||||||
|
- deps: mime-db@~1.15.0
|
||||||
|
|
||||||
|
1.2.10 / 2015-07-01
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.2
|
||||||
|
- deps: mime-db@~1.14.0
|
||||||
|
|
||||||
|
1.2.9 / 2015-06-08
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.1
|
||||||
|
- perf: fix deopt during mapping
|
||||||
|
|
||||||
|
1.2.8 / 2015-06-07
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.0
|
||||||
|
- deps: mime-db@~1.13.0
|
||||||
|
* perf: avoid argument reassignment & argument slice
|
||||||
|
* perf: avoid negotiator recursive construction
|
||||||
|
* perf: enable strict mode
|
||||||
|
* perf: remove unnecessary bitwise operator
|
||||||
|
|
||||||
|
1.2.7 / 2015-05-10
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.5.3
|
||||||
|
- Fix media type parameter matching to be case-insensitive
|
||||||
|
|
||||||
|
1.2.6 / 2015-05-07
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.11
|
||||||
|
- deps: mime-db@~1.9.1
|
||||||
|
* deps: negotiator@0.5.2
|
||||||
|
- Fix comparing media types with quoted values
|
||||||
|
- Fix splitting media types with quoted commas
|
||||||
|
|
||||||
|
1.2.5 / 2015-03-13
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.10
|
||||||
|
- deps: mime-db@~1.8.0
|
||||||
|
|
||||||
|
1.2.4 / 2015-02-14
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Support Node.js 0.6
|
||||||
|
* deps: mime-types@~2.0.9
|
||||||
|
- deps: mime-db@~1.7.0
|
||||||
|
* deps: negotiator@0.5.1
|
||||||
|
- Fix preference sorting to be stable for long acceptable lists
|
||||||
|
|
||||||
|
1.2.3 / 2015-01-31
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.8
|
||||||
|
- deps: mime-db@~1.6.0
|
||||||
|
|
||||||
|
1.2.2 / 2014-12-30
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.7
|
||||||
|
- deps: mime-db@~1.5.0
|
||||||
|
|
||||||
|
1.2.1 / 2014-12-30
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.5
|
||||||
|
- deps: mime-db@~1.3.1
|
||||||
|
|
||||||
|
1.2.0 / 2014-12-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.5.0
|
||||||
|
- Fix list return order when large accepted list
|
||||||
|
- Fix missing identity encoding when q=0 exists
|
||||||
|
- Remove dynamic building of Negotiator class
|
||||||
|
|
||||||
|
1.1.4 / 2014-12-10
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.4
|
||||||
|
- deps: mime-db@~1.3.0
|
||||||
|
|
||||||
|
1.1.3 / 2014-11-09
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.3
|
||||||
|
- deps: mime-db@~1.2.0
|
||||||
|
|
||||||
|
1.1.2 / 2014-10-14
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.4.9
|
||||||
|
- Fix error when media type has invalid parameter
|
||||||
|
|
||||||
|
1.1.1 / 2014-09-28
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.2
|
||||||
|
- deps: mime-db@~1.1.0
|
||||||
|
* deps: negotiator@0.4.8
|
||||||
|
- Fix all negotiations to be case-insensitive
|
||||||
|
- Stable sort preferences of same quality according to client order
|
||||||
|
|
||||||
|
1.1.0 / 2014-09-02
|
||||||
|
==================
|
||||||
|
|
||||||
|
* update `mime-types`
|
||||||
|
|
||||||
|
1.0.7 / 2014-07-04
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Fix wrong type returned from `type` when match after unknown extension
|
||||||
|
|
||||||
|
1.0.6 / 2014-06-24
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.4.7
|
||||||
|
|
||||||
|
1.0.5 / 2014-06-20
|
||||||
|
==================
|
||||||
|
|
||||||
|
* fix crash when unknown extension given
|
||||||
|
|
||||||
|
1.0.4 / 2014-06-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* use `mime-types`
|
||||||
|
|
||||||
|
1.0.3 / 2014-06-11
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.4.6
|
||||||
|
- Order by specificity when quality is the same
|
||||||
|
|
||||||
|
1.0.2 / 2014-05-29
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Fix interpretation when header not in request
|
||||||
|
* deps: pin negotiator@0.4.5
|
||||||
|
|
||||||
|
1.0.1 / 2014-01-18
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Identity encoding isn't always acceptable
|
||||||
|
* deps: negotiator@~0.4.0
|
||||||
|
|
||||||
|
1.0.0 / 2013-12-27
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Genesis
|
||||||
23
server/node_modules/accepts/LICENSE
generated
vendored
Normal file
23
server/node_modules/accepts/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
(The MIT License)
|
||||||
|
|
||||||
|
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
|
||||||
|
Copyright (c) 2015 Douglas Christopher Wilson <doug@somethingdoug.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining
|
||||||
|
a copy of this software and associated documentation files (the
|
||||||
|
'Software'), to deal in the Software without restriction, including
|
||||||
|
without limitation the rights to use, copy, modify, merge, publish,
|
||||||
|
distribute, sublicense, and/or sell copies of the Software, and to
|
||||||
|
permit persons to whom the Software is furnished to do so, subject to
|
||||||
|
the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be
|
||||||
|
included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||||
|
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||||
|
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||||
|
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||||
|
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
140
server/node_modules/accepts/README.md
generated
vendored
Normal file
140
server/node_modules/accepts/README.md
generated
vendored
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
# accepts
|
||||||
|
|
||||||
|
[![NPM Version][npm-version-image]][npm-url]
|
||||||
|
[![NPM Downloads][npm-downloads-image]][npm-url]
|
||||||
|
[![Node.js Version][node-version-image]][node-version-url]
|
||||||
|
[![Build Status][github-actions-ci-image]][github-actions-ci-url]
|
||||||
|
[![Test Coverage][coveralls-image]][coveralls-url]
|
||||||
|
|
||||||
|
Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator).
|
||||||
|
Extracted from [koa](https://www.npmjs.com/package/koa) for general use.
|
||||||
|
|
||||||
|
In addition to negotiator, it allows:
|
||||||
|
|
||||||
|
- Allows types as an array or arguments list, ie `(['text/html', 'application/json'])`
|
||||||
|
as well as `('text/html', 'application/json')`.
|
||||||
|
- Allows type shorthands such as `json`.
|
||||||
|
- Returns `false` when no types match
|
||||||
|
- Treats non-existent headers as `*`
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
This is a [Node.js](https://nodejs.org/en/) module available through the
|
||||||
|
[npm registry](https://www.npmjs.com/). Installation is done using the
|
||||||
|
[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally):
|
||||||
|
|
||||||
|
```sh
|
||||||
|
$ npm install accepts
|
||||||
|
```
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
```js
|
||||||
|
var accepts = require('accepts')
|
||||||
|
```
|
||||||
|
|
||||||
|
### accepts(req)
|
||||||
|
|
||||||
|
Create a new `Accepts` object for the given `req`.
|
||||||
|
|
||||||
|
#### .charset(charsets)
|
||||||
|
|
||||||
|
Return the first accepted charset. If nothing in `charsets` is accepted,
|
||||||
|
then `false` is returned.
|
||||||
|
|
||||||
|
#### .charsets()
|
||||||
|
|
||||||
|
Return the charsets that the request accepts, in the order of the client's
|
||||||
|
preference (most preferred first).
|
||||||
|
|
||||||
|
#### .encoding(encodings)
|
||||||
|
|
||||||
|
Return the first accepted encoding. If nothing in `encodings` is accepted,
|
||||||
|
then `false` is returned.
|
||||||
|
|
||||||
|
#### .encodings()
|
||||||
|
|
||||||
|
Return the encodings that the request accepts, in the order of the client's
|
||||||
|
preference (most preferred first).
|
||||||
|
|
||||||
|
#### .language(languages)
|
||||||
|
|
||||||
|
Return the first accepted language. If nothing in `languages` is accepted,
|
||||||
|
then `false` is returned.
|
||||||
|
|
||||||
|
#### .languages()
|
||||||
|
|
||||||
|
Return the languages that the request accepts, in the order of the client's
|
||||||
|
preference (most preferred first).
|
||||||
|
|
||||||
|
#### .type(types)
|
||||||
|
|
||||||
|
Return the first accepted type (and it is returned as the same text as what
|
||||||
|
appears in the `types` array). If nothing in `types` is accepted, then `false`
|
||||||
|
is returned.
|
||||||
|
|
||||||
|
The `types` array can contain full MIME types or file extensions. Any value
|
||||||
|
that is not a full MIME types is passed to `require('mime-types').lookup`.
|
||||||
|
|
||||||
|
#### .types()
|
||||||
|
|
||||||
|
Return the types that the request accepts, in the order of the client's
|
||||||
|
preference (most preferred first).
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
### Simple type negotiation
|
||||||
|
|
||||||
|
This simple example shows how to use `accepts` to return a different typed
|
||||||
|
respond body based on what the client wants to accept. The server lists it's
|
||||||
|
preferences in order and will get back the best match between the client and
|
||||||
|
server.
|
||||||
|
|
||||||
|
```js
|
||||||
|
var accepts = require('accepts')
|
||||||
|
var http = require('http')
|
||||||
|
|
||||||
|
function app (req, res) {
|
||||||
|
var accept = accepts(req)
|
||||||
|
|
||||||
|
// the order of this list is significant; should be server preferred order
|
||||||
|
switch (accept.type(['json', 'html'])) {
|
||||||
|
case 'json':
|
||||||
|
res.setHeader('Content-Type', 'application/json')
|
||||||
|
res.write('{"hello":"world!"}')
|
||||||
|
break
|
||||||
|
case 'html':
|
||||||
|
res.setHeader('Content-Type', 'text/html')
|
||||||
|
res.write('<b>hello, world!</b>')
|
||||||
|
break
|
||||||
|
default:
|
||||||
|
// the fallback is text/plain, so no need to specify it above
|
||||||
|
res.setHeader('Content-Type', 'text/plain')
|
||||||
|
res.write('hello, world!')
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
res.end()
|
||||||
|
}
|
||||||
|
|
||||||
|
http.createServer(app).listen(3000)
|
||||||
|
```
|
||||||
|
|
||||||
|
You can test this out with the cURL program:
|
||||||
|
```sh
|
||||||
|
curl -I -H'Accept: text/html' http://localhost:3000/
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
[MIT](LICENSE)
|
||||||
|
|
||||||
|
[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master
|
||||||
|
[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master
|
||||||
|
[github-actions-ci-image]: https://badgen.net/github/checks/jshttp/accepts/master?label=ci
|
||||||
|
[github-actions-ci-url]: https://github.com/jshttp/accepts/actions/workflows/ci.yml
|
||||||
|
[node-version-image]: https://badgen.net/npm/node/accepts
|
||||||
|
[node-version-url]: https://nodejs.org/en/download
|
||||||
|
[npm-downloads-image]: https://badgen.net/npm/dm/accepts
|
||||||
|
[npm-url]: https://npmjs.org/package/accepts
|
||||||
|
[npm-version-image]: https://badgen.net/npm/v/accepts
|
||||||
238
server/node_modules/accepts/index.js
generated
vendored
Normal file
238
server/node_modules/accepts/index.js
generated
vendored
Normal file
@@ -0,0 +1,238 @@
|
|||||||
|
/*!
|
||||||
|
* accepts
|
||||||
|
* Copyright(c) 2014 Jonathan Ong
|
||||||
|
* Copyright(c) 2015 Douglas Christopher Wilson
|
||||||
|
* MIT Licensed
|
||||||
|
*/
|
||||||
|
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module dependencies.
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
var Negotiator = require('negotiator')
|
||||||
|
var mime = require('mime-types')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module exports.
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
module.exports = Accepts
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new Accepts object for the given req.
|
||||||
|
*
|
||||||
|
* @param {object} req
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
 * Create a new Accepts object for the given req.
 * Works with or without `new`: `Accepts(req)` and `new Accepts(req)`
 * are equivalent.
 *
 * @param {object} req - Node HTTP request; only `headers` is read here
 * @public
 */
function Accepts (req) {
  // allow plain-call construction
  if (!(this instanceof Accepts)) return new Accepts(req)

  this.headers = req.headers
  this.negotiator = new Negotiator(req)
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the given `type(s)` is acceptable, returning
|
||||||
|
* the best match when true, otherwise `undefined`, in which
|
||||||
|
* case you should respond with 406 "Not Acceptable".
|
||||||
|
*
|
||||||
|
* The `type` value may be a single mime type string
|
||||||
|
* such as "application/json", the extension name
|
||||||
|
* such as "json" or an array `["json", "html", "text/plain"]`. When a list
|
||||||
|
* or array is given the _best_ match, if any is returned.
|
||||||
|
*
|
||||||
|
* Examples:
|
||||||
|
*
|
||||||
|
* // Accept: text/html
|
||||||
|
* this.types('html');
|
||||||
|
* // => "html"
|
||||||
|
*
|
||||||
|
* // Accept: text/*, application/json
|
||||||
|
* this.types('html');
|
||||||
|
* // => "html"
|
||||||
|
* this.types('text/html');
|
||||||
|
* // => "text/html"
|
||||||
|
* this.types('json', 'text');
|
||||||
|
* // => "json"
|
||||||
|
* this.types('application/json');
|
||||||
|
* // => "application/json"
|
||||||
|
*
|
||||||
|
* // Accept: text/*, application/json
|
||||||
|
* this.types('image/png');
|
||||||
|
* this.types('png');
|
||||||
|
* // => undefined
|
||||||
|
*
|
||||||
|
* // Accept: text/*;q=.5, application/json
|
||||||
|
* this.types(['html', 'json']);
|
||||||
|
* this.types('html', 'json');
|
||||||
|
* // => "json"
|
||||||
|
*
|
||||||
|
* @param {String|Array} types...
|
||||||
|
* @return {String|Array|Boolean}
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Accepts.prototype.type =
Accepts.prototype.types = function (types_) {
  var list = types_

  // accept either an array or variadic string arguments
  if (list && !Array.isArray(list)) {
    list = Array.prototype.slice.call(arguments)
  }

  // with no candidates, report every media type the client accepts
  if (!list || list.length === 0) {
    return this.negotiator.mediaTypes()
  }

  // no Accept header means anything goes: honor the caller's first choice
  if (!this.headers.accept) {
    return list[0]
  }

  // candidates may be extensions ("json"); normalize to full MIME types,
  // dropping any that mime.lookup could not resolve (non-strings)
  var fullTypes = list.map(extToMime)
  var best = this.negotiator.mediaTypes(fullTypes.filter(validMime))[0]

  // map the winning MIME type back to the caller's original spelling
  return best
    ? list[fullTypes.indexOf(best)]
    : false
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return accepted encodings or best fit based on `encodings`.
|
||||||
|
*
|
||||||
|
* Given `Accept-Encoding: gzip, deflate`
|
||||||
|
* an array sorted by quality is returned:
|
||||||
|
*
|
||||||
|
* ['gzip', 'deflate']
|
||||||
|
*
|
||||||
|
* @param {String|Array} encodings...
|
||||||
|
* @return {String|Array}
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Accepts.prototype.encoding =
Accepts.prototype.encodings = function (encodings_) {
  var list = encodings_

  // accept either an array or variadic string arguments
  if (list && !Array.isArray(list)) {
    list = Array.prototype.slice.call(arguments)
  }

  // with no candidates, report every encoding the client accepts,
  // most preferred first
  if (!list || list.length === 0) {
    return this.negotiator.encodings()
  }

  // best match among the candidates, or false when none is acceptable
  return this.negotiator.encodings(list)[0] || false
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return accepted charsets or best fit based on `charsets`.
|
||||||
|
*
|
||||||
|
* Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5`
|
||||||
|
* an array sorted by quality is returned:
|
||||||
|
*
|
||||||
|
* ['utf-8', 'utf-7', 'iso-8859-1']
|
||||||
|
*
|
||||||
|
* @param {String|Array} charsets...
|
||||||
|
* @return {String|Array}
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Accepts.prototype.charset =
Accepts.prototype.charsets = function (charsets_) {
  var list = charsets_

  // accept either an array or variadic string arguments
  if (list && !Array.isArray(list)) {
    list = Array.prototype.slice.call(arguments)
  }

  // with no candidates, report every charset the client accepts,
  // most preferred first
  if (!list || list.length === 0) {
    return this.negotiator.charsets()
  }

  // best match among the candidates, or false when none is acceptable
  return this.negotiator.charsets(list)[0] || false
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return accepted languages or best fit based on `langs`.
|
||||||
|
*
|
||||||
|
* Given `Accept-Language: en;q=0.8, es, pt`
|
||||||
|
* an array sorted by quality is returned:
|
||||||
|
*
|
||||||
|
* ['es', 'pt', 'en']
|
||||||
|
*
|
||||||
|
* @param {String|Array} langs...
|
||||||
|
* @return {Array|String}
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Accepts.prototype.lang =
Accepts.prototype.langs =
Accepts.prototype.language =
Accepts.prototype.languages = function (languages_) {
  var list = languages_

  // accept either an array or variadic string arguments
  if (list && !Array.isArray(list)) {
    list = Array.prototype.slice.call(arguments)
  }

  // with no candidates, report every language the client accepts,
  // most preferred first
  if (!list || list.length === 0) {
    return this.negotiator.languages()
  }

  // best match among the candidates, or false when none is acceptable
  return this.negotiator.languages(list)[0] || false
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert extnames to mime.
|
||||||
|
*
|
||||||
|
* @param {String} type
|
||||||
|
* @return {String}
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
 * Convert an extension name to a full MIME type.
 * Values already containing "/" are assumed to be full MIME types
 * and pass through untouched.
 *
 * @param {String} type
 * @return {String} full MIME type, or whatever mime.lookup returns
 * @private
 */
function extToMime (type) {
  if (type.indexOf('/') !== -1) {
    return type
  }
  return mime.lookup(type)
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if mime is valid.
|
||||||
|
*
|
||||||
|
* @param {String} type
|
||||||
|
* @return {String}
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
 * Check that a MIME lookup produced a usable value.
 * mime.lookup returns false for unknown extensions, so only
 * string results are considered valid.
 *
 * @param {String} type
 * @return {Boolean}
 * @private
 */
function validMime (type) {
  return typeof type === 'string'
}
|
||||||
47
server/node_modules/accepts/package.json
generated
vendored
Normal file
47
server/node_modules/accepts/package.json
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
{
|
||||||
|
"name": "accepts",
|
||||||
|
"description": "Higher-level content negotiation",
|
||||||
|
"version": "1.3.8",
|
||||||
|
"contributors": [
|
||||||
|
"Douglas Christopher Wilson <doug@somethingdoug.com>",
|
||||||
|
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "jshttp/accepts",
|
||||||
|
"dependencies": {
|
||||||
|
"mime-types": "~2.1.34",
|
||||||
|
"negotiator": "0.6.3"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"deep-equal": "1.0.1",
|
||||||
|
"eslint": "7.32.0",
|
||||||
|
"eslint-config-standard": "14.1.1",
|
||||||
|
"eslint-plugin-import": "2.25.4",
|
||||||
|
"eslint-plugin-markdown": "2.2.1",
|
||||||
|
"eslint-plugin-node": "11.1.0",
|
||||||
|
"eslint-plugin-promise": "4.3.1",
|
||||||
|
"eslint-plugin-standard": "4.1.0",
|
||||||
|
"mocha": "9.2.0",
|
||||||
|
"nyc": "15.1.0"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"LICENSE",
|
||||||
|
"HISTORY.md",
|
||||||
|
"index.js"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"lint": "eslint .",
|
||||||
|
"test": "mocha --reporter spec --check-leaks --bail test/",
|
||||||
|
"test-ci": "nyc --reporter=lcov --reporter=text npm test",
|
||||||
|
"test-cov": "nyc --reporter=html --reporter=text npm test"
|
||||||
|
},
|
||||||
|
"keywords": [
|
||||||
|
"content",
|
||||||
|
"negotiation",
|
||||||
|
"accept",
|
||||||
|
"accepts"
|
||||||
|
]
|
||||||
|
}
|
||||||
21
server/node_modules/array-flatten/LICENSE
generated
vendored
Normal file
21
server/node_modules/array-flatten/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
43
server/node_modules/array-flatten/README.md
generated
vendored
Normal file
43
server/node_modules/array-flatten/README.md
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
# Array Flatten
|
||||||
|
|
||||||
|
[![NPM version][npm-image]][npm-url]
|
||||||
|
[![NPM downloads][downloads-image]][downloads-url]
|
||||||
|
[![Build status][travis-image]][travis-url]
|
||||||
|
[![Test coverage][coveralls-image]][coveralls-url]
|
||||||
|
|
||||||
|
> Flatten an array of nested arrays into a single flat array. Accepts an optional depth.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```
|
||||||
|
npm install array-flatten --save
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var flatten = require('array-flatten')
|
||||||
|
|
||||||
|
flatten([1, [2, [3, [4, [5], 6], 7], 8], 9])
|
||||||
|
//=> [1, 2, 3, 4, 5, 6, 7, 8, 9]
|
||||||
|
|
||||||
|
flatten([1, [2, [3, [4, [5], 6], 7], 8], 9], 2)
|
||||||
|
//=> [1, 2, 3, [4, [5], 6], 7, 8, 9]
|
||||||
|
|
||||||
|
(function () {
|
||||||
|
flatten(arguments) //=> [1, 2, 3]
|
||||||
|
})(1, [2, 3])
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT
|
||||||
|
|
||||||
|
[npm-image]: https://img.shields.io/npm/v/array-flatten.svg?style=flat
|
||||||
|
[npm-url]: https://npmjs.org/package/array-flatten
|
||||||
|
[downloads-image]: https://img.shields.io/npm/dm/array-flatten.svg?style=flat
|
||||||
|
[downloads-url]: https://npmjs.org/package/array-flatten
|
||||||
|
[travis-image]: https://img.shields.io/travis/blakeembrey/array-flatten.svg?style=flat
|
||||||
|
[travis-url]: https://travis-ci.org/blakeembrey/array-flatten
|
||||||
|
[coveralls-image]: https://img.shields.io/coveralls/blakeembrey/array-flatten.svg?style=flat
|
||||||
|
[coveralls-url]: https://coveralls.io/r/blakeembrey/array-flatten?branch=master
|
||||||
64
server/node_modules/array-flatten/array-flatten.js
generated
vendored
Normal file
64
server/node_modules/array-flatten/array-flatten.js
generated
vendored
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Expose `arrayFlatten`.
|
||||||
|
*/
|
||||||
|
module.exports = arrayFlatten
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Recursive flatten function with depth.
|
||||||
|
*
|
||||||
|
* @param {Array} array
|
||||||
|
* @param {Array} result
|
||||||
|
* @param {Number} depth
|
||||||
|
* @return {Array}
|
||||||
|
*/
|
||||||
|
/**
 * Depth-limited recursive flatten.
 * Appends elements of `array` into `result`, unwrapping nested arrays
 * while `depth` levels remain.
 *
 * @param {Array} array - input (may contain nested arrays)
 * @param {Array} result - accumulator, mutated in place
 * @param {Number} depth - remaining levels to unwrap
 * @return {Array} the same `result` array
 */
function flattenWithDepth (array, result, depth) {
  var index = -1

  while (++index < array.length) {
    var item = array[index]

    if (depth > 0 && Array.isArray(item)) {
      // recurse one level deeper with a reduced budget
      flattenWithDepth(item, result, depth - 1)
    } else {
      result.push(item)
    }
  }

  return result
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Recursive flatten function. Omitting depth is slightly faster.
|
||||||
|
*
|
||||||
|
* @param {Array} array
|
||||||
|
* @param {Array} result
|
||||||
|
* @return {Array}
|
||||||
|
*/
|
||||||
|
/**
 * Fully recursive flatten with no depth limit.
 * Slightly faster than the depth-tracking variant because there is
 * no counter to maintain.
 *
 * @param {Array} array - input (may contain nested arrays)
 * @param {Array} result - accumulator, mutated in place
 * @return {Array} the same `result` array
 */
function flattenForever (array, result) {
  var index = -1

  while (++index < array.length) {
    var item = array[index]

    if (Array.isArray(item)) {
      flattenForever(item, result)
    } else {
      result.push(item)
    }
  }

  return result
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Flatten an array, with the ability to define a depth.
|
||||||
|
*
|
||||||
|
* @param {Array} array
|
||||||
|
* @param {Number} depth
|
||||||
|
* @return {Array}
|
||||||
|
*/
|
||||||
|
/**
 * Flatten an array, optionally limiting how many levels are unwrapped.
 * A missing (null/undefined) depth flattens all the way down.
 *
 * @param {Array} array
 * @param {Number} [depth]
 * @return {Array} a new flat array
 */
function arrayFlatten (array, depth) {
  // `== null` deliberately matches both null and undefined
  return depth == null
    ? flattenForever(array, [])
    : flattenWithDepth(array, [], depth)
}
|
||||||
39
server/node_modules/array-flatten/package.json
generated
vendored
Normal file
39
server/node_modules/array-flatten/package.json
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
{
|
||||||
|
"name": "array-flatten",
|
||||||
|
"version": "1.1.1",
|
||||||
|
"description": "Flatten an array of nested arrays into a single flat array",
|
||||||
|
"main": "array-flatten.js",
|
||||||
|
"files": [
|
||||||
|
"array-flatten.js",
|
||||||
|
"LICENSE"
|
||||||
|
],
|
||||||
|
"scripts": {
|
||||||
|
"test": "istanbul cover _mocha -- -R spec"
|
||||||
|
},
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git://github.com/blakeembrey/array-flatten.git"
|
||||||
|
},
|
||||||
|
"keywords": [
|
||||||
|
"array",
|
||||||
|
"flatten",
|
||||||
|
"arguments",
|
||||||
|
"depth"
|
||||||
|
],
|
||||||
|
"author": {
|
||||||
|
"name": "Blake Embrey",
|
||||||
|
"email": "hello@blakeembrey.com",
|
||||||
|
"url": "http://blakeembrey.me"
|
||||||
|
},
|
||||||
|
"license": "MIT",
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/blakeembrey/array-flatten/issues"
|
||||||
|
},
|
||||||
|
"homepage": "https://github.com/blakeembrey/array-flatten",
|
||||||
|
"devDependencies": {
|
||||||
|
"istanbul": "^0.3.13",
|
||||||
|
"mocha": "^2.2.4",
|
||||||
|
"pre-commit": "^1.0.7",
|
||||||
|
"standard": "^3.7.3"
|
||||||
|
}
|
||||||
|
}
|
||||||
21
server/node_modules/asynckit/LICENSE
generated
vendored
Normal file
21
server/node_modules/asynckit/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2016 Alex Indigo
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
233
server/node_modules/asynckit/README.md
generated
vendored
Normal file
233
server/node_modules/asynckit/README.md
generated
vendored
Normal file
@@ -0,0 +1,233 @@
|
|||||||
|
# asynckit [](https://www.npmjs.com/package/asynckit)
|
||||||
|
|
||||||
|
Minimal async jobs utility library, with streams support.
|
||||||
|
|
||||||
|
[](https://travis-ci.org/alexindigo/asynckit)
|
||||||
|
[](https://travis-ci.org/alexindigo/asynckit)
|
||||||
|
[](https://ci.appveyor.com/project/alexindigo/asynckit)
|
||||||
|
|
||||||
|
[](https://coveralls.io/github/alexindigo/asynckit?branch=master)
|
||||||
|
[](https://david-dm.org/alexindigo/asynckit)
|
||||||
|
[](https://www.bithound.io/github/alexindigo/asynckit)
|
||||||
|
|
||||||
|
<!-- [](https://www.npmjs.com/package/reamde) -->
|
||||||
|
|
||||||
|
AsyncKit provides harness for `parallel` and `serial` iterators over list of items represented by arrays or objects.
|
||||||
|
Optionally it accepts an abort function (which should be returned synchronously by the iterator for each item) and terminates leftover jobs upon an error event. For a specific iteration order, built-in (`ascending` and `descending`) and custom sort helpers are also supported via the `asynckit.serialOrdered` method.
|
||||||
|
|
||||||
|
It ensures async operations to keep behavior more stable and prevent `Maximum call stack size exceeded` errors, from sync iterators.
|
||||||
|
|
||||||
|
| compression | size |
|
||||||
|
| :----------------- | -------: |
|
||||||
|
| asynckit.js | 12.34 kB |
|
||||||
|
| asynckit.min.js | 4.11 kB |
|
||||||
|
| asynckit.min.js.gz | 1.47 kB |
|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```sh
|
||||||
|
$ npm install --save asynckit
|
||||||
|
```
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
### Parallel Jobs
|
||||||
|
|
||||||
|
Runs iterator over provided array in parallel. Stores output in the `result` array,
|
||||||
|
on the matching positions. In unlikely event of an error from one of the jobs,
|
||||||
|
will terminate rest of the active jobs (if abort function is provided)
|
||||||
|
and return error along with salvaged data to the main callback function.
|
||||||
|
|
||||||
|
#### Input Array
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var parallel = require('asynckit').parallel
|
||||||
|
, assert = require('assert')
|
||||||
|
;
|
||||||
|
|
||||||
|
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
||||||
|
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
|
||||||
|
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
|
||||||
|
, target = []
|
||||||
|
;
|
||||||
|
|
||||||
|
parallel(source, asyncJob, function(err, result)
|
||||||
|
{
|
||||||
|
assert.deepEqual(result, expectedResult);
|
||||||
|
assert.deepEqual(target, expectedTarget);
|
||||||
|
});
|
||||||
|
|
||||||
|
// async job accepts one element from the array
|
||||||
|
// and a callback function
|
||||||
|
function asyncJob(item, cb)
|
||||||
|
{
|
||||||
|
// different delays (in ms) per item
|
||||||
|
var delay = item * 25;
|
||||||
|
|
||||||
|
// pretend different jobs take different time to finish
|
||||||
|
// and not in consequential order
|
||||||
|
var timeoutId = setTimeout(function() {
|
||||||
|
target.push(item);
|
||||||
|
cb(null, item * 2);
|
||||||
|
}, delay);
|
||||||
|
|
||||||
|
// allow to cancel "leftover" jobs upon error
|
||||||
|
// return function, invoking of which will abort this job
|
||||||
|
return clearTimeout.bind(null, timeoutId);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js).
|
||||||
|
|
||||||
|
#### Input Object
|
||||||
|
|
||||||
|
Also it supports named jobs, listed via object.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var parallel = require('asynckit/parallel')
|
||||||
|
, assert = require('assert')
|
||||||
|
;
|
||||||
|
|
||||||
|
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
|
||||||
|
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
|
||||||
|
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
|
||||||
|
, expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ]
|
||||||
|
, target = []
|
||||||
|
, keys = []
|
||||||
|
;
|
||||||
|
|
||||||
|
parallel(source, asyncJob, function(err, result)
|
||||||
|
{
|
||||||
|
assert.deepEqual(result, expectedResult);
|
||||||
|
assert.deepEqual(target, expectedTarget);
|
||||||
|
assert.deepEqual(keys, expectedKeys);
|
||||||
|
});
|
||||||
|
|
||||||
|
// supports full value, key, callback (shortcut) interface
|
||||||
|
function asyncJob(item, key, cb)
|
||||||
|
{
|
||||||
|
// different delays (in ms) per item
|
||||||
|
var delay = item * 25;
|
||||||
|
|
||||||
|
// pretend different jobs take different time to finish
|
||||||
|
// and not in consequential order
|
||||||
|
var timeoutId = setTimeout(function() {
|
||||||
|
keys.push(key);
|
||||||
|
target.push(item);
|
||||||
|
cb(null, item * 2);
|
||||||
|
}, delay);
|
||||||
|
|
||||||
|
// allow to cancel "leftover" jobs upon error
|
||||||
|
// return function, invoking of which will abort this job
|
||||||
|
return clearTimeout.bind(null, timeoutId);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js).
|
||||||
|
|
||||||
|
### Serial Jobs
|
||||||
|
|
||||||
|
Runs iterator over provided array sequentially. Stores output in the `result` array,
|
||||||
|
on the matching positions. In unlikely event of an error from one of the jobs,
|
||||||
|
will not proceed to the rest of the items in the list
|
||||||
|
and return error along with salvaged data to the main callback function.
|
||||||
|
|
||||||
|
#### Input Array
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var serial = require('asynckit/serial')
|
||||||
|
, assert = require('assert')
|
||||||
|
;
|
||||||
|
|
||||||
|
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
||||||
|
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
|
||||||
|
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
|
||||||
|
, target = []
|
||||||
|
;
|
||||||
|
|
||||||
|
serial(source, asyncJob, function(err, result)
|
||||||
|
{
|
||||||
|
assert.deepEqual(result, expectedResult);
|
||||||
|
assert.deepEqual(target, expectedTarget);
|
||||||
|
});
|
||||||
|
|
||||||
|
// extended interface (item, key, callback)
|
||||||
|
// also supported for arrays
|
||||||
|
function asyncJob(item, key, cb)
|
||||||
|
{
|
||||||
|
target.push(key);
|
||||||
|
|
||||||
|
// it will be automatically made async
|
||||||
|
  // even if the iterator "returns" in the same event loop
|
||||||
|
cb(null, item * 2);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
More examples could be found in [test/test-serial-array.js](test/test-serial-array.js).
|
||||||
|
|
||||||
|
#### Input Object
|
||||||
|
|
||||||
|
Also it supports named jobs, listed via object.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var serial = require('asynckit').serial
|
||||||
|
, assert = require('assert')
|
||||||
|
;
|
||||||
|
|
||||||
|
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
||||||
|
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
|
||||||
|
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
|
||||||
|
, target = []
|
||||||
|
;
|
||||||
|
|
||||||
|
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
|
||||||
|
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
|
||||||
|
, expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
||||||
|
, target = []
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
serial(source, asyncJob, function(err, result)
|
||||||
|
{
|
||||||
|
assert.deepEqual(result, expectedResult);
|
||||||
|
assert.deepEqual(target, expectedTarget);
|
||||||
|
});
|
||||||
|
|
||||||
|
// shortcut interface (item, callback)
|
||||||
|
// works for object as well as for the arrays
|
||||||
|
function asyncJob(item, cb)
|
||||||
|
{
|
||||||
|
target.push(item);
|
||||||
|
|
||||||
|
// it will be automatically made async
|
||||||
|
  // even if the iterator "returns" in the same event loop
|
||||||
|
cb(null, item * 2);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
More examples could be found in [test/test-serial-object.js](test/test-serial-object.js).
|
||||||
|
|
||||||
|
_Note: Since _object_ is an _unordered_ collection of properties,
|
||||||
|
it may produce unexpected results with sequential iterations.
|
||||||
|
Whenever order of the jobs' execution is important please use `serialOrdered` method._
|
||||||
|
|
||||||
|
### Ordered Serial Iterations
|
||||||
|
|
||||||
|
TBD
|
||||||
|
|
||||||
|
For example [compare-property](compare-property) package.
|
||||||
|
|
||||||
|
### Streaming interface
|
||||||
|
|
||||||
|
TBD
|
||||||
|
|
||||||
|
## Want to Know More?
|
||||||
|
|
||||||
|
More examples can be found in [test folder](test/).
|
||||||
|
|
||||||
|
Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions.
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
AsyncKit is licensed under the MIT license.
|
||||||
76
server/node_modules/asynckit/bench.js
generated
vendored
Normal file
76
server/node_modules/asynckit/bench.js
generated
vendored
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
/* eslint no-console: "off" */

// Micro-benchmark: asynckit.parallel vs async.map on an identical
// summing workload. Each job defers via setImmediate to mimic real
// async work; correctness is asserted before each cycle resolves.
// NOTE(review): requires the third-party `async` and `benchmark`
// packages — devDependencies only, not part of the library runtime.

var asynckit = require('./')
  , async    = require('async')
  , assert   = require('assert')
  , expected = 0
  ;

var Benchmark = require('benchmark');
var suite = new Benchmark.Suite;

// shared input [1..99]; `expected` accumulates the final running total
// used as a sanity check at the end of every benchmark iteration
var source = [];
for (var z = 1; z < 100; z++)
{
  source.push(z);
  expected += z;
}

suite
  // add tests

  .add('async.map', function(deferred)
  {
    var total = 0;

    async.map(source,
      function(i, cb)
      {
        // hop to the next event-loop turn like a real async job would
        setImmediate(function()
        {
          total += i;
          cb(null, total);
        });
      },
      function(err, result)
      {
        assert.ifError(err);
        // the last element carries the cumulative total
        assert.equal(result[result.length - 1], expected);
        deferred.resolve();
      });
  }, {'defer': true})


  .add('asynckit.parallel', function(deferred)
  {
    var total = 0;

    asynckit.parallel(source,
      function(i, cb)
      {
        // same deferred-job shape as the async.map case above
        setImmediate(function()
        {
          total += i;
          cb(null, total);
        });
      },
      function(err, result)
      {
        assert.ifError(err);
        assert.equal(result[result.length - 1], expected);
        deferred.resolve();
      });
  }, {'defer': true})


  // add listeners
  .on('cycle', function(ev)
  {
    console.log(String(ev.target));
  })
  .on('complete', function()
  {
    console.log('Fastest is ' + this.filter('fastest').map('name'));
  })
  // run async
  .run({ 'async': true });
|
||||||
6
server/node_modules/asynckit/index.js
generated
vendored
Normal file
6
server/node_modules/asynckit/index.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
module.exports =
|
||||||
|
{
|
||||||
|
parallel : require('./parallel.js'),
|
||||||
|
serial : require('./serial.js'),
|
||||||
|
serialOrdered : require('./serialOrdered.js')
|
||||||
|
};
|
||||||
29
server/node_modules/asynckit/lib/abort.js
generated
vendored
Normal file
29
server/node_modules/asynckit/lib/abort.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
// API
|
||||||
|
module.exports = abort;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Aborts leftover active jobs
|
||||||
|
*
|
||||||
|
* @param {object} state - current state object
|
||||||
|
*/
|
||||||
|
/**
 * Aborts all still-active jobs registered in `state.jobs`,
 * then empties the registry.
 *
 * @param {object} state - current state object
 */
function abort(state)
{
  var ids = Object.keys(state.jobs);
  ids.forEach(clean.bind(state));

  // nothing is running anymore
  state.jobs = {};
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cleans up leftover job by invoking abort function for the provided job id
|
||||||
|
*
|
||||||
|
* @this state
|
||||||
|
* @param {string|number} key - job id to abort
|
||||||
|
*/
|
||||||
|
function clean(key)
|
||||||
|
{
|
||||||
|
if (typeof this.jobs[key] == 'function')
|
||||||
|
{
|
||||||
|
this.jobs[key]();
|
||||||
|
}
|
||||||
|
}
|
||||||
34
server/node_modules/asynckit/lib/async.js
generated
vendored
Normal file
34
server/node_modules/asynckit/lib/async.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
var defer = require('./defer.js');
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = async;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs provided callback asynchronously
|
||||||
|
* even if callback itself is not
|
||||||
|
*
|
||||||
|
* @param {function} callback - callback to invoke
|
||||||
|
* @returns {function} - augmented callback
|
||||||
|
*/
|
||||||
|
function async(callback)
|
||||||
|
{
|
||||||
|
var isAsync = false;
|
||||||
|
|
||||||
|
// check if async happened
|
||||||
|
defer(function() { isAsync = true; });
|
||||||
|
|
||||||
|
return function async_callback(err, result)
|
||||||
|
{
|
||||||
|
if (isAsync)
|
||||||
|
{
|
||||||
|
callback(err, result);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
defer(function nextTick_callback()
|
||||||
|
{
|
||||||
|
callback(err, result);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
26
server/node_modules/asynckit/lib/defer.js
generated
vendored
Normal file
26
server/node_modules/asynckit/lib/defer.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
module.exports = defer;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs provided function on next iteration of the event loop
|
||||||
|
*
|
||||||
|
* @param {function} fn - function to run
|
||||||
|
*/
|
||||||
|
function defer(fn)
|
||||||
|
{
|
||||||
|
var nextTick = typeof setImmediate == 'function'
|
||||||
|
? setImmediate
|
||||||
|
: (
|
||||||
|
typeof process == 'object' && typeof process.nextTick == 'function'
|
||||||
|
? process.nextTick
|
||||||
|
: null
|
||||||
|
);
|
||||||
|
|
||||||
|
if (nextTick)
|
||||||
|
{
|
||||||
|
nextTick(fn);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
setTimeout(fn, 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
75
server/node_modules/asynckit/lib/iterate.js
generated
vendored
Normal file
75
server/node_modules/asynckit/lib/iterate.js
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
var async = require('./async.js')
|
||||||
|
, abort = require('./abort.js')
|
||||||
|
;
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = iterate;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Iterates over each job object
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {object} state - current job status
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
*/
|
||||||
|
function iterate(list, iterator, state, callback)
|
||||||
|
{
|
||||||
|
// store current index
|
||||||
|
var key = state['keyedList'] ? state['keyedList'][state.index] : state.index;
|
||||||
|
|
||||||
|
state.jobs[key] = runJob(iterator, key, list[key], function(error, output)
|
||||||
|
{
|
||||||
|
// don't repeat yourself
|
||||||
|
// skip secondary callbacks
|
||||||
|
if (!(key in state.jobs))
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// clean up jobs
|
||||||
|
delete state.jobs[key];
|
||||||
|
|
||||||
|
if (error)
|
||||||
|
{
|
||||||
|
// don't process rest of the results
|
||||||
|
// stop still active jobs
|
||||||
|
// and reset the list
|
||||||
|
abort(state);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
state.results[key] = output;
|
||||||
|
}
|
||||||
|
|
||||||
|
// return salvaged results
|
||||||
|
callback(error, state.results);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs iterator over provided job element
|
||||||
|
*
|
||||||
|
* @param {function} iterator - iterator to invoke
|
||||||
|
* @param {string|number} key - key/index of the element in the list of jobs
|
||||||
|
* @param {mixed} item - job description
|
||||||
|
* @param {function} callback - invoked after iterator is done with the job
|
||||||
|
* @returns {function|mixed} - job abort function or something else
|
||||||
|
*/
|
||||||
|
function runJob(iterator, key, item, callback)
|
||||||
|
{
|
||||||
|
var aborter;
|
||||||
|
|
||||||
|
// allow shortcut if iterator expects only two arguments
|
||||||
|
if (iterator.length == 2)
|
||||||
|
{
|
||||||
|
aborter = iterator(item, async(callback));
|
||||||
|
}
|
||||||
|
// otherwise go with full three arguments
|
||||||
|
else
|
||||||
|
{
|
||||||
|
aborter = iterator(item, key, async(callback));
|
||||||
|
}
|
||||||
|
|
||||||
|
return aborter;
|
||||||
|
}
|
||||||
91
server/node_modules/asynckit/lib/readable_asynckit.js
generated
vendored
Normal file
91
server/node_modules/asynckit/lib/readable_asynckit.js
generated
vendored
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
var streamify = require('./streamify.js')
|
||||||
|
, defer = require('./defer.js')
|
||||||
|
;
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = ReadableAsyncKit;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Base constructor for all streams
|
||||||
|
* used to hold properties/methods
|
||||||
|
*/
|
||||||
|
function ReadableAsyncKit()
|
||||||
|
{
|
||||||
|
ReadableAsyncKit.super_.apply(this, arguments);
|
||||||
|
|
||||||
|
// list of active jobs
|
||||||
|
this.jobs = {};
|
||||||
|
|
||||||
|
// add stream methods
|
||||||
|
this.destroy = destroy;
|
||||||
|
this._start = _start;
|
||||||
|
this._read = _read;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Destroys readable stream,
|
||||||
|
* by aborting outstanding jobs
|
||||||
|
*
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
function destroy()
|
||||||
|
{
|
||||||
|
if (this.destroyed)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.destroyed = true;
|
||||||
|
|
||||||
|
if (typeof this.terminator == 'function')
|
||||||
|
{
|
||||||
|
this.terminator();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Starts provided jobs in async manner
|
||||||
|
*
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
function _start()
|
||||||
|
{
|
||||||
|
// first argument – runner function
|
||||||
|
var runner = arguments[0]
|
||||||
|
// take away first argument
|
||||||
|
, args = Array.prototype.slice.call(arguments, 1)
|
||||||
|
// second argument - input data
|
||||||
|
, input = args[0]
|
||||||
|
// last argument - result callback
|
||||||
|
, endCb = streamify.callback.call(this, args[args.length - 1])
|
||||||
|
;
|
||||||
|
|
||||||
|
args[args.length - 1] = endCb;
|
||||||
|
// third argument - iterator
|
||||||
|
args[1] = streamify.iterator.call(this, args[1]);
|
||||||
|
|
||||||
|
// allow time for proper setup
|
||||||
|
defer(function()
|
||||||
|
{
|
||||||
|
if (!this.destroyed)
|
||||||
|
{
|
||||||
|
this.terminator = runner.apply(null, args);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
endCb(null, Array.isArray(input) ? [] : {});
|
||||||
|
}
|
||||||
|
}.bind(this));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Implement _read to comply with Readable streams
|
||||||
|
* Doesn't really make sense for flowing object mode
|
||||||
|
*
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
function _read()
|
||||||
|
{
|
||||||
|
|
||||||
|
}
|
||||||
25
server/node_modules/asynckit/lib/readable_parallel.js
generated
vendored
Normal file
25
server/node_modules/asynckit/lib/readable_parallel.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
var parallel = require('../parallel.js');
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = ReadableParallel;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Streaming wrapper to `asynckit.parallel`
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
* @returns {stream.Readable#}
|
||||||
|
*/
|
||||||
|
function ReadableParallel(list, iterator, callback)
|
||||||
|
{
|
||||||
|
if (!(this instanceof ReadableParallel))
|
||||||
|
{
|
||||||
|
return new ReadableParallel(list, iterator, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
// turn on object mode
|
||||||
|
ReadableParallel.super_.call(this, {objectMode: true});
|
||||||
|
|
||||||
|
this._start(parallel, list, iterator, callback);
|
||||||
|
}
|
||||||
25
server/node_modules/asynckit/lib/readable_serial.js
generated
vendored
Normal file
25
server/node_modules/asynckit/lib/readable_serial.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
var serial = require('../serial.js');
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = ReadableSerial;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Streaming wrapper to `asynckit.serial`
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
* @returns {stream.Readable#}
|
||||||
|
*/
|
||||||
|
function ReadableSerial(list, iterator, callback)
|
||||||
|
{
|
||||||
|
if (!(this instanceof ReadableSerial))
|
||||||
|
{
|
||||||
|
return new ReadableSerial(list, iterator, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
// turn on object mode
|
||||||
|
ReadableSerial.super_.call(this, {objectMode: true});
|
||||||
|
|
||||||
|
this._start(serial, list, iterator, callback);
|
||||||
|
}
|
||||||
29
server/node_modules/asynckit/lib/readable_serial_ordered.js
generated
vendored
Normal file
29
server/node_modules/asynckit/lib/readable_serial_ordered.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
var serialOrdered = require('../serialOrdered.js');
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = ReadableSerialOrdered;
|
||||||
|
// expose sort helpers
|
||||||
|
module.exports.ascending = serialOrdered.ascending;
|
||||||
|
module.exports.descending = serialOrdered.descending;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Streaming wrapper to `asynckit.serialOrdered`
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {function} sortMethod - custom sort function
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
* @returns {stream.Readable#}
|
||||||
|
*/
|
||||||
|
function ReadableSerialOrdered(list, iterator, sortMethod, callback)
|
||||||
|
{
|
||||||
|
if (!(this instanceof ReadableSerialOrdered))
|
||||||
|
{
|
||||||
|
return new ReadableSerialOrdered(list, iterator, sortMethod, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
// turn on object mode
|
||||||
|
ReadableSerialOrdered.super_.call(this, {objectMode: true});
|
||||||
|
|
||||||
|
this._start(serialOrdered, list, iterator, sortMethod, callback);
|
||||||
|
}
|
||||||
37
server/node_modules/asynckit/lib/state.js
generated
vendored
Normal file
37
server/node_modules/asynckit/lib/state.js
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
// API
|
||||||
|
module.exports = state;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates initial state object
|
||||||
|
* for iteration over list
|
||||||
|
*
|
||||||
|
* @param {array|object} list - list to iterate over
|
||||||
|
* @param {function|null} sortMethod - function to use for keys sort,
|
||||||
|
* or `null` to keep them as is
|
||||||
|
* @returns {object} - initial state object
|
||||||
|
*/
|
||||||
|
function state(list, sortMethod)
|
||||||
|
{
|
||||||
|
var isNamedList = !Array.isArray(list)
|
||||||
|
, initState =
|
||||||
|
{
|
||||||
|
index : 0,
|
||||||
|
keyedList: isNamedList || sortMethod ? Object.keys(list) : null,
|
||||||
|
jobs : {},
|
||||||
|
results : isNamedList ? {} : [],
|
||||||
|
size : isNamedList ? Object.keys(list).length : list.length
|
||||||
|
}
|
||||||
|
;
|
||||||
|
|
||||||
|
if (sortMethod)
|
||||||
|
{
|
||||||
|
// sort array keys based on it's values
|
||||||
|
// sort object's keys just on own merit
|
||||||
|
initState.keyedList.sort(isNamedList ? sortMethod : function(a, b)
|
||||||
|
{
|
||||||
|
return sortMethod(list[a], list[b]);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return initState;
|
||||||
|
}
|
||||||
141
server/node_modules/asynckit/lib/streamify.js
generated
vendored
Normal file
141
server/node_modules/asynckit/lib/streamify.js
generated
vendored
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
var async = require('./async.js');
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = {
|
||||||
|
iterator: wrapIterator,
|
||||||
|
callback: wrapCallback
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wraps iterators with long signature
|
||||||
|
*
|
||||||
|
* @this ReadableAsyncKit#
|
||||||
|
* @param {function} iterator - function to wrap
|
||||||
|
* @returns {function} - wrapped function
|
||||||
|
*/
|
||||||
|
function wrapIterator(iterator)
|
||||||
|
{
|
||||||
|
var stream = this;
|
||||||
|
|
||||||
|
return function(item, key, cb)
|
||||||
|
{
|
||||||
|
var aborter
|
||||||
|
, wrappedCb = async(wrapIteratorCallback.call(stream, cb, key))
|
||||||
|
;
|
||||||
|
|
||||||
|
stream.jobs[key] = wrappedCb;
|
||||||
|
|
||||||
|
// it's either shortcut (item, cb)
|
||||||
|
if (iterator.length == 2)
|
||||||
|
{
|
||||||
|
aborter = iterator(item, wrappedCb);
|
||||||
|
}
|
||||||
|
// or long format (item, key, cb)
|
||||||
|
else
|
||||||
|
{
|
||||||
|
aborter = iterator(item, key, wrappedCb);
|
||||||
|
}
|
||||||
|
|
||||||
|
return aborter;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wraps provided callback function
|
||||||
|
* allowing to execute snitch function before
|
||||||
|
* real callback
|
||||||
|
*
|
||||||
|
* @this ReadableAsyncKit#
|
||||||
|
* @param {function} callback - function to wrap
|
||||||
|
* @returns {function} - wrapped function
|
||||||
|
*/
|
||||||
|
function wrapCallback(callback)
|
||||||
|
{
|
||||||
|
var stream = this;
|
||||||
|
|
||||||
|
var wrapped = function(error, result)
|
||||||
|
{
|
||||||
|
return finisher.call(stream, error, result, callback);
|
||||||
|
};
|
||||||
|
|
||||||
|
return wrapped;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wraps provided iterator callback function
|
||||||
|
* makes sure snitch only called once,
|
||||||
|
* but passes secondary calls to the original callback
|
||||||
|
*
|
||||||
|
* @this ReadableAsyncKit#
|
||||||
|
* @param {function} callback - callback to wrap
|
||||||
|
* @param {number|string} key - iteration key
|
||||||
|
* @returns {function} wrapped callback
|
||||||
|
*/
|
||||||
|
function wrapIteratorCallback(callback, key)
|
||||||
|
{
|
||||||
|
var stream = this;
|
||||||
|
|
||||||
|
return function(error, output)
|
||||||
|
{
|
||||||
|
// don't repeat yourself
|
||||||
|
if (!(key in stream.jobs))
|
||||||
|
{
|
||||||
|
callback(error, output);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// clean up jobs
|
||||||
|
delete stream.jobs[key];
|
||||||
|
|
||||||
|
return streamer.call(stream, error, {key: key, value: output}, callback);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stream wrapper for iterator callback
|
||||||
|
*
|
||||||
|
* @this ReadableAsyncKit#
|
||||||
|
* @param {mixed} error - error response
|
||||||
|
* @param {mixed} output - iterator output
|
||||||
|
* @param {function} callback - callback that expects iterator results
|
||||||
|
*/
|
||||||
|
function streamer(error, output, callback)
|
||||||
|
{
|
||||||
|
if (error && !this.error)
|
||||||
|
{
|
||||||
|
this.error = error;
|
||||||
|
this.pause();
|
||||||
|
this.emit('error', error);
|
||||||
|
// send back value only, as expected
|
||||||
|
callback(error, output && output.value);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// stream stuff
|
||||||
|
this.push(output);
|
||||||
|
|
||||||
|
// back to original track
|
||||||
|
// send back value only, as expected
|
||||||
|
callback(error, output && output.value);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stream wrapper for finishing callback
|
||||||
|
*
|
||||||
|
* @this ReadableAsyncKit#
|
||||||
|
* @param {mixed} error - error response
|
||||||
|
* @param {mixed} output - iterator output
|
||||||
|
* @param {function} callback - callback that expects final results
|
||||||
|
*/
|
||||||
|
function finisher(error, output, callback)
|
||||||
|
{
|
||||||
|
// signal end of the stream
|
||||||
|
// only for successfully finished streams
|
||||||
|
if (!error)
|
||||||
|
{
|
||||||
|
this.push(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
// back to original track
|
||||||
|
callback(error, output);
|
||||||
|
}
|
||||||
29
server/node_modules/asynckit/lib/terminator.js
generated
vendored
Normal file
29
server/node_modules/asynckit/lib/terminator.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
var abort = require('./abort.js')
|
||||||
|
, async = require('./async.js')
|
||||||
|
;
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = terminator;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Terminates jobs in the attached state context
|
||||||
|
*
|
||||||
|
* @this AsyncKitState#
|
||||||
|
* @param {function} callback - final callback to invoke after termination
|
||||||
|
*/
|
||||||
|
function terminator(callback)
|
||||||
|
{
|
||||||
|
if (!Object.keys(this.jobs).length)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// fast forward iteration index
|
||||||
|
this.index = this.size;
|
||||||
|
|
||||||
|
// abort jobs
|
||||||
|
abort(this);
|
||||||
|
|
||||||
|
// send back results we have so far
|
||||||
|
async(callback)(null, this.results);
|
||||||
|
}
|
||||||
63
server/node_modules/asynckit/package.json
generated
vendored
Normal file
63
server/node_modules/asynckit/package.json
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
{
|
||||||
|
"name": "asynckit",
|
||||||
|
"version": "0.4.0",
|
||||||
|
"description": "Minimal async jobs utility library, with streams support",
|
||||||
|
"main": "index.js",
|
||||||
|
"scripts": {
|
||||||
|
"clean": "rimraf coverage",
|
||||||
|
"lint": "eslint *.js lib/*.js test/*.js",
|
||||||
|
"test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec",
|
||||||
|
"win-test": "tape test/test-*.js",
|
||||||
|
"browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec",
|
||||||
|
"report": "istanbul report",
|
||||||
|
"size": "browserify index.js | size-table asynckit",
|
||||||
|
"debug": "tape test/test-*.js"
|
||||||
|
},
|
||||||
|
"pre-commit": [
|
||||||
|
"clean",
|
||||||
|
"lint",
|
||||||
|
"test",
|
||||||
|
"browser",
|
||||||
|
"report",
|
||||||
|
"size"
|
||||||
|
],
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/alexindigo/asynckit.git"
|
||||||
|
},
|
||||||
|
"keywords": [
|
||||||
|
"async",
|
||||||
|
"jobs",
|
||||||
|
"parallel",
|
||||||
|
"serial",
|
||||||
|
"iterator",
|
||||||
|
"array",
|
||||||
|
"object",
|
||||||
|
"stream",
|
||||||
|
"destroy",
|
||||||
|
"terminate",
|
||||||
|
"abort"
|
||||||
|
],
|
||||||
|
"author": "Alex Indigo <iam@alexindigo.com>",
|
||||||
|
"license": "MIT",
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/alexindigo/asynckit/issues"
|
||||||
|
},
|
||||||
|
"homepage": "https://github.com/alexindigo/asynckit#readme",
|
||||||
|
"devDependencies": {
|
||||||
|
"browserify": "^13.0.0",
|
||||||
|
"browserify-istanbul": "^2.0.0",
|
||||||
|
"coveralls": "^2.11.9",
|
||||||
|
"eslint": "^2.9.0",
|
||||||
|
"istanbul": "^0.4.3",
|
||||||
|
"obake": "^0.1.2",
|
||||||
|
"phantomjs-prebuilt": "^2.1.7",
|
||||||
|
"pre-commit": "^1.1.3",
|
||||||
|
"reamde": "^1.1.0",
|
||||||
|
"rimraf": "^2.5.2",
|
||||||
|
"size-table": "^0.2.0",
|
||||||
|
"tap-spec": "^4.1.1",
|
||||||
|
"tape": "^4.5.1"
|
||||||
|
},
|
||||||
|
"dependencies": {}
|
||||||
|
}
|
||||||
43
server/node_modules/asynckit/parallel.js
generated
vendored
Normal file
43
server/node_modules/asynckit/parallel.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
var iterate = require('./lib/iterate.js')
|
||||||
|
, initState = require('./lib/state.js')
|
||||||
|
, terminator = require('./lib/terminator.js')
|
||||||
|
;
|
||||||
|
|
||||||
|
// Public API
|
||||||
|
module.exports = parallel;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs iterator over provided array elements in parallel
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
* @returns {function} - jobs terminator
|
||||||
|
*/
|
||||||
|
function parallel(list, iterator, callback)
|
||||||
|
{
|
||||||
|
var state = initState(list);
|
||||||
|
|
||||||
|
while (state.index < (state['keyedList'] || list).length)
|
||||||
|
{
|
||||||
|
iterate(list, iterator, state, function(error, result)
|
||||||
|
{
|
||||||
|
if (error)
|
||||||
|
{
|
||||||
|
callback(error, result);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// looks like it's the last one
|
||||||
|
if (Object.keys(state.jobs).length === 0)
|
||||||
|
{
|
||||||
|
callback(null, state.results);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
state.index++;
|
||||||
|
}
|
||||||
|
|
||||||
|
return terminator.bind(state, callback);
|
||||||
|
}
|
||||||
17
server/node_modules/asynckit/serial.js
generated
vendored
Normal file
17
server/node_modules/asynckit/serial.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
var serialOrdered = require('./serialOrdered.js');
|
||||||
|
|
||||||
|
// Public API
|
||||||
|
module.exports = serial;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs iterator over provided array elements in series
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
* @returns {function} - jobs terminator
|
||||||
|
*/
|
||||||
|
function serial(list, iterator, callback)
|
||||||
|
{
|
||||||
|
return serialOrdered(list, iterator, null, callback);
|
||||||
|
}
|
||||||
75
server/node_modules/asynckit/serialOrdered.js
generated
vendored
Normal file
75
server/node_modules/asynckit/serialOrdered.js
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
var iterate = require('./lib/iterate.js')
|
||||||
|
, initState = require('./lib/state.js')
|
||||||
|
, terminator = require('./lib/terminator.js')
|
||||||
|
;
|
||||||
|
|
||||||
|
// Public API
|
||||||
|
module.exports = serialOrdered;
|
||||||
|
// sorting helpers
|
||||||
|
module.exports.ascending = ascending;
|
||||||
|
module.exports.descending = descending;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs iterator over provided sorted array elements in series
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {function} sortMethod - custom sort function
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
* @returns {function} - jobs terminator
|
||||||
|
*/
|
||||||
|
function serialOrdered(list, iterator, sortMethod, callback)
|
||||||
|
{
|
||||||
|
var state = initState(list, sortMethod);
|
||||||
|
|
||||||
|
iterate(list, iterator, state, function iteratorHandler(error, result)
|
||||||
|
{
|
||||||
|
if (error)
|
||||||
|
{
|
||||||
|
callback(error, result);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
state.index++;
|
||||||
|
|
||||||
|
// are we there yet?
|
||||||
|
if (state.index < (state['keyedList'] || list).length)
|
||||||
|
{
|
||||||
|
iterate(list, iterator, state, iteratorHandler);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// done here
|
||||||
|
callback(null, state.results);
|
||||||
|
});
|
||||||
|
|
||||||
|
return terminator.bind(state, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* -- Sort methods
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* sort helper to sort array elements in ascending order
|
||||||
|
*
|
||||||
|
* @param {mixed} a - an item to compare
|
||||||
|
* @param {mixed} b - an item to compare
|
||||||
|
* @returns {number} - comparison result
|
||||||
|
*/
|
||||||
|
function ascending(a, b)
|
||||||
|
{
|
||||||
|
return a < b ? -1 : a > b ? 1 : 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* sort helper to sort array elements in descending order
|
||||||
|
*
|
||||||
|
* @param {mixed} a - an item to compare
|
||||||
|
* @param {mixed} b - an item to compare
|
||||||
|
* @returns {number} - comparison result
|
||||||
|
*/
|
||||||
|
function descending(a, b)
|
||||||
|
{
|
||||||
|
return -1 * ascending(a, b);
|
||||||
|
}
|
||||||
21
server/node_modules/asynckit/stream.js
generated
vendored
Normal file
21
server/node_modules/asynckit/stream.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
var inherits = require('util').inherits
|
||||||
|
, Readable = require('stream').Readable
|
||||||
|
, ReadableAsyncKit = require('./lib/readable_asynckit.js')
|
||||||
|
, ReadableParallel = require('./lib/readable_parallel.js')
|
||||||
|
, ReadableSerial = require('./lib/readable_serial.js')
|
||||||
|
, ReadableSerialOrdered = require('./lib/readable_serial_ordered.js')
|
||||||
|
;
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports =
|
||||||
|
{
|
||||||
|
parallel : ReadableParallel,
|
||||||
|
serial : ReadableSerial,
|
||||||
|
serialOrdered : ReadableSerialOrdered,
|
||||||
|
};
|
||||||
|
|
||||||
|
inherits(ReadableAsyncKit, Readable);
|
||||||
|
|
||||||
|
inherits(ReadableParallel, ReadableAsyncKit);
|
||||||
|
inherits(ReadableSerial, ReadableAsyncKit);
|
||||||
|
inherits(ReadableSerialOrdered, ReadableAsyncKit);
|
||||||
1416
server/node_modules/axios/CHANGELOG.md
generated
vendored
Normal file
1416
server/node_modules/axios/CHANGELOG.md
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
7
server/node_modules/axios/LICENSE
generated
vendored
Normal file
7
server/node_modules/axios/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# Copyright (c) 2014-present Matt Zabriskie & Collaborators
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
877
server/node_modules/axios/MIGRATION_GUIDE.md
generated
vendored
Normal file
877
server/node_modules/axios/MIGRATION_GUIDE.md
generated
vendored
Normal file
@@ -0,0 +1,877 @@
|
|||||||
|
# Axios Migration Guide
|
||||||
|
|
||||||
|
> **Migrating from Axios 0.x to 1.x**
|
||||||
|
>
|
||||||
|
> This guide helps developers upgrade from Axios 0.x to 1.x by documenting breaking changes, providing migration strategies, and offering solutions to common upgrade challenges.
|
||||||
|
|
||||||
|
## Table of Contents
|
||||||
|
|
||||||
|
- [Overview](#overview)
|
||||||
|
- [Breaking Changes](#breaking-changes)
|
||||||
|
- [Error Handling Migration](#error-handling-migration)
|
||||||
|
- [API Changes](#api-changes)
|
||||||
|
- [Configuration Changes](#configuration-changes)
|
||||||
|
- [Migration Strategies](#migration-strategies)
|
||||||
|
- [Common Patterns](#common-patterns)
|
||||||
|
- [Troubleshooting](#troubleshooting)
|
||||||
|
- [Resources](#resources)
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Axios 1.x introduced several breaking changes to improve consistency, security, and developer experience. While these changes provide better error handling and more predictable behavior, they require code updates when migrating from 0.x versions.
|
||||||
|
|
||||||
|
### Key Changes Summary
|
||||||
|
|
||||||
|
| Area | 0.x Behavior | 1.x Behavior | Impact |
|
||||||
|
|------|--------------|--------------|--------|
|
||||||
|
| Error Handling | Selective throwing | Consistent throwing | High |
|
||||||
|
| JSON Parsing | Lenient | Strict | Medium |
|
||||||
|
| Browser Support | IE11+ | Modern browsers | Low-Medium |
|
||||||
|
| TypeScript | Partial | Full support | Low |
|
||||||
|
|
||||||
|
### Migration Complexity
|
||||||
|
|
||||||
|
- **Simple applications**: 1-2 hours
|
||||||
|
- **Medium applications**: 1-2 days
|
||||||
|
- **Large applications with complex error handling**: 3-5 days
|
||||||
|
|
||||||
|
## Breaking Changes
|
||||||
|
|
||||||
|
### 1. Error Handling Changes
|
||||||
|
|
||||||
|
**The most significant change in Axios 1.x is how errors are handled.**
|
||||||
|
|
||||||
|
#### 0.x Behavior
|
||||||
|
```javascript
|
||||||
|
// Axios 0.x - Some HTTP error codes didn't throw
|
||||||
|
axios.get('/api/data')
|
||||||
|
.then(response => {
|
||||||
|
// Response interceptor could handle all errors
|
||||||
|
console.log('Success:', response.data);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Response interceptor handled everything
|
||||||
|
axios.interceptors.response.use(
|
||||||
|
response => response,
|
||||||
|
error => {
|
||||||
|
handleError(error);
|
||||||
|
// Error was "handled" and didn't propagate
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 1.x Behavior
|
||||||
|
```javascript
|
||||||
|
// Axios 1.x - All HTTP errors throw consistently
|
||||||
|
axios.get('/api/data')
|
||||||
|
.then(response => {
|
||||||
|
console.log('Success:', response.data);
|
||||||
|
})
|
||||||
|
.catch(error => {
|
||||||
|
// Must handle errors at call site or they propagate
|
||||||
|
console.error('Request failed:', error);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Response interceptor must re-throw or return rejected promise
|
||||||
|
axios.interceptors.response.use(
|
||||||
|
response => response,
|
||||||
|
error => {
|
||||||
|
handleError(error);
|
||||||
|
// Must explicitly handle propagation
|
||||||
|
return Promise.reject(error); // or throw error;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Impact
|
||||||
|
- **Response interceptors** can no longer "swallow" errors silently
|
||||||
|
- **Every API call** must handle errors explicitly or they become unhandled promise rejections
|
||||||
|
- **Centralized error handling** requires new patterns
|
||||||
|
|
||||||
|
### 2. JSON Parsing Changes
|
||||||
|
|
||||||
|
#### 0.x Behavior
|
||||||
|
```javascript
|
||||||
|
// Axios 0.x - Lenient JSON parsing
|
||||||
|
// Would attempt to parse even invalid JSON
|
||||||
|
response.data; // Might contain partial data or fallbacks
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 1.x Behavior
|
||||||
|
```javascript
|
||||||
|
// Axios 1.x - Strict JSON parsing
|
||||||
|
// Throws clear errors for invalid JSON
|
||||||
|
try {
|
||||||
|
const data = response.data;
|
||||||
|
} catch (error) {
|
||||||
|
// Handle JSON parsing errors explicitly
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Request/Response Transform Changes
|
||||||
|
|
||||||
|
#### 0.x Behavior
|
||||||
|
```javascript
|
||||||
|
// Implicit transformations with some edge cases
|
||||||
|
transformRequest: [function (data) {
|
||||||
|
// Less predictable behavior
|
||||||
|
return data;
|
||||||
|
}]
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 1.x Behavior
|
||||||
|
```javascript
|
||||||
|
// More consistent transformation pipeline
|
||||||
|
transformRequest: [function (data, headers) {
|
||||||
|
// Headers parameter always available
|
||||||
|
// More predictable behavior
|
||||||
|
return data;
|
||||||
|
}]
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Browser Support Changes
|
||||||
|
|
||||||
|
- **0.x**: Supported IE11 and older browsers
|
||||||
|
- **1.x**: Requires modern browsers with Promise support
|
||||||
|
- **Polyfills**: May be needed for older browser support
|
||||||
|
|
||||||
|
## Error Handling Migration
|
||||||
|
|
||||||
|
The error handling changes are the most complex part of migrating to Axios 1.x. Here are proven strategies:
|
||||||
|
|
||||||
|
### Strategy 1: Centralized Error Handling with Error Boundary
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Create a centralized error handler
|
||||||
|
class ApiErrorHandler {
|
||||||
|
constructor() {
|
||||||
|
this.setupInterceptors();
|
||||||
|
}
|
||||||
|
|
||||||
|
setupInterceptors() {
|
||||||
|
axios.interceptors.response.use(
|
||||||
|
response => response,
|
||||||
|
error => {
|
||||||
|
// Centralized error processing
|
||||||
|
this.processError(error);
|
||||||
|
|
||||||
|
// Return a resolved promise with error info for handled errors
|
||||||
|
if (this.isHandledError(error)) {
|
||||||
|
return Promise.resolve({
|
||||||
|
data: null,
|
||||||
|
error: this.normalizeError(error),
|
||||||
|
handled: true
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Re-throw unhandled errors
|
||||||
|
return Promise.reject(error);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
processError(error) {
|
||||||
|
// Log errors
|
||||||
|
console.error('API Error:', error);
|
||||||
|
|
||||||
|
// Show user notifications
|
||||||
|
if (error.response?.status === 401) {
|
||||||
|
this.handleAuthError();
|
||||||
|
} else if (error.response?.status >= 500) {
|
||||||
|
this.showErrorNotification('Server error occurred');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
isHandledError(error) {
|
||||||
|
// Define which errors are "handled" centrally
|
||||||
|
const handledStatuses = [401, 403, 404, 422, 500, 502, 503];
|
||||||
|
return handledStatuses.includes(error.response?.status);
|
||||||
|
}
|
||||||
|
|
||||||
|
normalizeError(error) {
|
||||||
|
return {
|
||||||
|
status: error.response?.status,
|
||||||
|
message: error.response?.data?.message || error.message,
|
||||||
|
code: error.response?.data?.code || error.code
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
handleAuthError() {
|
||||||
|
// Redirect to login, clear tokens, etc.
|
||||||
|
localStorage.removeItem('token');
|
||||||
|
window.location.href = '/login';
|
||||||
|
}
|
||||||
|
|
||||||
|
showErrorNotification(message) {
|
||||||
|
// Show user-friendly error message
|
||||||
|
console.error(message); // Replace with your notification system
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize globally
|
||||||
|
const errorHandler = new ApiErrorHandler();
|
||||||
|
|
||||||
|
// Usage in components/services
|
||||||
|
async function fetchUserData(userId) {
|
||||||
|
try {
|
||||||
|
const response = await axios.get(`/api/users/${userId}`);
|
||||||
|
|
||||||
|
// Check if error was handled centrally
|
||||||
|
if (response.handled) {
|
||||||
|
return { data: null, error: response.error };
|
||||||
|
}
|
||||||
|
|
||||||
|
return { data: response.data, error: null };
|
||||||
|
} catch (error) {
|
||||||
|
// Unhandled errors still need local handling
|
||||||
|
return { data: null, error: { message: 'Unexpected error occurred' } };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Strategy 2: Wrapper Function Pattern
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Create a wrapper that provides 0.x-like behavior
|
||||||
|
function createApiWrapper() {
|
||||||
|
const api = axios.create();
|
||||||
|
|
||||||
|
// Add response interceptor for centralized handling
|
||||||
|
api.interceptors.response.use(
|
||||||
|
response => response,
|
||||||
|
error => {
|
||||||
|
// Handle common errors centrally
|
||||||
|
if (error.response?.status === 401) {
|
||||||
|
// Handle auth errors
|
||||||
|
handleAuthError();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error.response?.status >= 500) {
|
||||||
|
// Handle server errors
|
||||||
|
showServerErrorNotification();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Always reject to maintain error propagation
|
||||||
|
return Promise.reject(error);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
// Wrapper function that mimics 0.x behavior
|
||||||
|
function safeRequest(requestConfig, options = {}) {
|
||||||
|
return api(requestConfig)
|
||||||
|
.then(response => response)
|
||||||
|
.catch(error => {
|
||||||
|
if (options.suppressErrors) {
|
||||||
|
// Return error info instead of throwing
|
||||||
|
return {
|
||||||
|
data: null,
|
||||||
|
error: {
|
||||||
|
status: error.response?.status,
|
||||||
|
message: error.response?.data?.message || error.message
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return { safeRequest, axios: api };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Usage
|
||||||
|
const { safeRequest } = createApiWrapper();
|
||||||
|
|
||||||
|
// For calls where you want centralized error handling
|
||||||
|
const result = await safeRequest(
|
||||||
|
{ method: 'get', url: '/api/data' },
|
||||||
|
{ suppressErrors: true }
|
||||||
|
);
|
||||||
|
|
||||||
|
if (result.error) {
|
||||||
|
// Handle error case
|
||||||
|
console.log('Request failed:', result.error.message);
|
||||||
|
} else {
|
||||||
|
// Handle success case
|
||||||
|
console.log('Data:', result.data);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Strategy 3: Global Error Handler with Custom Events
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Set up global error handling with events
|
||||||
|
class GlobalErrorHandler extends EventTarget {
|
||||||
|
constructor() {
|
||||||
|
super();
|
||||||
|
this.setupInterceptors();
|
||||||
|
}
|
||||||
|
|
||||||
|
setupInterceptors() {
|
||||||
|
axios.interceptors.response.use(
|
||||||
|
response => response,
|
||||||
|
error => {
|
||||||
|
// Emit custom event for global handling
|
||||||
|
this.dispatchEvent(new CustomEvent('apiError', {
|
||||||
|
detail: { error, timestamp: new Date() }
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Always reject to maintain proper error flow
|
||||||
|
return Promise.reject(error);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const globalErrorHandler = new GlobalErrorHandler();
|
||||||
|
|
||||||
|
// Set up global listeners
|
||||||
|
globalErrorHandler.addEventListener('apiError', (event) => {
|
||||||
|
const { error } = event.detail;
|
||||||
|
|
||||||
|
// Centralized error logic
|
||||||
|
if (error.response?.status === 401) {
|
||||||
|
handleAuthError();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error.response?.status >= 500) {
|
||||||
|
showErrorNotification('Server error occurred');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Usage remains clean
|
||||||
|
async function apiCall() {
|
||||||
|
try {
|
||||||
|
const response = await axios.get('/api/data');
|
||||||
|
return response.data;
|
||||||
|
} catch (error) {
|
||||||
|
// Error was already handled globally
|
||||||
|
// Just handle component-specific logic
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## API Changes
|
||||||
|
|
||||||
|
### Request Configuration
|
||||||
|
|
||||||
|
#### 0.x to 1.x Changes
|
||||||
|
```javascript
|
||||||
|
// 0.x - Some properties had different defaults
|
||||||
|
const config = {
|
||||||
|
timeout: 0, // No timeout by default
|
||||||
|
maxContentLength: -1, // No limit
|
||||||
|
};
|
||||||
|
|
||||||
|
// 1.x - More secure defaults
|
||||||
|
const config = {
|
||||||
|
timeout: 0, // Still no timeout, but easier to configure
|
||||||
|
  maxContentLength: 2000, // Default size limit in bytes (for security)
|
||||||
|
maxBodyLength: 2000, // New property
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Response Object
|
||||||
|
|
||||||
|
The response object structure remains largely the same, but error responses are more consistent:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Both 0.x and 1.x
|
||||||
|
response = {
|
||||||
|
data: {}, // Response body
|
||||||
|
status: 200, // HTTP status
|
||||||
|
statusText: 'OK', // HTTP status message
|
||||||
|
headers: {}, // Response headers
|
||||||
|
config: {}, // Request config
|
||||||
|
request: {} // Request object
|
||||||
|
};
|
||||||
|
|
||||||
|
// Error responses are more consistent in 1.x
|
||||||
|
error.response = {
|
||||||
|
data: {}, // Error response body
|
||||||
|
status: 404, // HTTP error status
|
||||||
|
statusText: 'Not Found',
|
||||||
|
headers: {},
|
||||||
|
config: {},
|
||||||
|
request: {}
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration Changes
|
||||||
|
|
||||||
|
### Default Configuration Updates
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// 0.x defaults
|
||||||
|
axios.defaults.timeout = 0; // No timeout
|
||||||
|
axios.defaults.maxContentLength = -1; // No limit
|
||||||
|
|
||||||
|
// 1.x defaults (more secure)
|
||||||
|
axios.defaults.timeout = 0; // Still no timeout
|
||||||
|
axios.defaults.maxContentLength = 2000; // 2000-byte limit
|
||||||
|
axios.defaults.maxBodyLength = 2000; // 2000-byte limit
|
||||||
|
```
|
||||||
|
|
||||||
|
### Instance Configuration
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// 0.x - Instance creation
|
||||||
|
const api = axios.create({
|
||||||
|
baseURL: 'https://api.example.com',
|
||||||
|
timeout: 1000,
|
||||||
|
});
|
||||||
|
|
||||||
|
// 1.x - Same API, but more options available
|
||||||
|
const api = axios.create({
|
||||||
|
baseURL: 'https://api.example.com',
|
||||||
|
timeout: 1000,
|
||||||
|
maxBodyLength: Infinity, // Override default if needed
|
||||||
|
maxContentLength: Infinity,
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Migration Strategies
|
||||||
|
|
||||||
|
### Step-by-Step Migration Process
|
||||||
|
|
||||||
|
#### Phase 1: Preparation
|
||||||
|
1. **Audit Current Error Handling**
|
||||||
|
```bash
|
||||||
|
# Find all axios usage
|
||||||
|
grep -r "axios\." src/
|
||||||
|
grep -r "\.catch" src/
|
||||||
|
grep -r "interceptors" src/
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Identify Patterns**
|
||||||
|
- Response interceptors that handle errors
|
||||||
|
- Components that rely on centralized error handling
|
||||||
|
- Authentication and retry logic
|
||||||
|
|
||||||
|
3. **Create Test Cases**
|
||||||
|
```javascript
|
||||||
|
// Test current error handling behavior
|
||||||
|
describe('Error Handling Migration', () => {
|
||||||
|
it('should handle 401 errors consistently', async () => {
|
||||||
|
// Test authentication error flows
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle 500 errors with user feedback', async () => {
|
||||||
|
// Test server error handling
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Phase 2: Implementation
|
||||||
|
1. **Update Dependencies**
|
||||||
|
```bash
|
||||||
|
npm update axios
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Implement New Error Handling**
|
||||||
|
- Choose one of the strategies above
|
||||||
|
- Update response interceptors
|
||||||
|
- Add error handling to API calls
|
||||||
|
|
||||||
|
3. **Update Authentication Logic**
|
||||||
|
```javascript
|
||||||
|
// 0.x pattern
|
||||||
|
axios.interceptors.response.use(null, error => {
|
||||||
|
if (error.response?.status === 401) {
|
||||||
|
logout();
|
||||||
|
// Error was "handled"
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// 1.x pattern
|
||||||
|
axios.interceptors.response.use(
|
||||||
|
response => response,
|
||||||
|
error => {
|
||||||
|
if (error.response?.status === 401) {
|
||||||
|
logout();
|
||||||
|
}
|
||||||
|
return Promise.reject(error); // Always propagate
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Phase 3: Testing and Validation
|
||||||
|
1. **Test Error Scenarios**
|
||||||
|
- Network failures
|
||||||
|
- HTTP error codes (401, 403, 404, 500, etc.)
|
||||||
|
- Timeout errors
|
||||||
|
- JSON parsing errors
|
||||||
|
|
||||||
|
2. **Validate User Experience**
|
||||||
|
- Error messages are shown appropriately
|
||||||
|
- Authentication redirects work
|
||||||
|
- Loading states are handled correctly
|
||||||
|
|
||||||
|
### Gradual Migration Approach
|
||||||
|
|
||||||
|
For large applications, consider gradual migration:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Create a compatibility layer
|
||||||
|
const axiosCompat = {
|
||||||
|
// Use new axios instance for new code
|
||||||
|
v1: axios.create({
|
||||||
|
// 1.x configuration
|
||||||
|
}),
|
||||||
|
|
||||||
|
// Wrapper for legacy code
|
||||||
|
legacy: createLegacyWrapper(axios.create({
|
||||||
|
// Configuration that mimics 0.x behavior
|
||||||
|
}))
|
||||||
|
};
|
||||||
|
|
||||||
|
function createLegacyWrapper(axiosInstance) {
|
||||||
|
// Add interceptors that provide 0.x-like behavior
|
||||||
|
axiosInstance.interceptors.response.use(
|
||||||
|
response => response,
|
||||||
|
error => {
|
||||||
|
// Handle errors in 0.x style for legacy code
|
||||||
|
handleLegacyError(error);
|
||||||
|
// Don't propagate certain errors
|
||||||
|
if (shouldSuppressError(error)) {
|
||||||
|
return Promise.resolve({ data: null, error: true });
|
||||||
|
}
|
||||||
|
return Promise.reject(error);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
return axiosInstance;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Common Patterns
|
||||||
|
|
||||||
|
### Authentication Interceptors
|
||||||
|
|
||||||
|
#### Updated Authentication Pattern
|
||||||
|
```javascript
|
||||||
|
// Token refresh interceptor for 1.x
|
||||||
|
let isRefreshing = false;
|
||||||
|
let refreshSubscribers = [];
|
||||||
|
|
||||||
|
function subscribeTokenRefresh(cb) {
|
||||||
|
refreshSubscribers.push(cb);
|
||||||
|
}
|
||||||
|
|
||||||
|
function onTokenRefreshed(token) {
|
||||||
|
refreshSubscribers.forEach(cb => cb(token));
|
||||||
|
refreshSubscribers = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
axios.interceptors.response.use(
|
||||||
|
response => response,
|
||||||
|
async error => {
|
||||||
|
const originalRequest = error.config;
|
||||||
|
|
||||||
|
if (error.response?.status === 401 && !originalRequest._retry) {
|
||||||
|
if (isRefreshing) {
|
||||||
|
// Wait for token refresh
|
||||||
|
return new Promise(resolve => {
|
||||||
|
subscribeTokenRefresh(token => {
|
||||||
|
originalRequest.headers.Authorization = `Bearer ${token}`;
|
||||||
|
resolve(axios(originalRequest));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
originalRequest._retry = true;
|
||||||
|
isRefreshing = true;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const newToken = await refreshToken();
|
||||||
|
onTokenRefreshed(newToken);
|
||||||
|
isRefreshing = false;
|
||||||
|
|
||||||
|
originalRequest.headers.Authorization = `Bearer ${newToken}`;
|
||||||
|
return axios(originalRequest);
|
||||||
|
} catch (refreshError) {
|
||||||
|
isRefreshing = false;
|
||||||
|
logout();
|
||||||
|
return Promise.reject(refreshError);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return Promise.reject(error);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Retry Logic
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Retry interceptor for 1.x
|
||||||
|
function createRetryInterceptor(maxRetries = 3, retryDelay = 1000) {
|
||||||
|
return axios.interceptors.response.use(
|
||||||
|
response => response,
|
||||||
|
async error => {
|
||||||
|
const config = error.config;
|
||||||
|
|
||||||
|
if (!config || !config.retry) {
|
||||||
|
return Promise.reject(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
config.__retryCount = config.__retryCount || 0;
|
||||||
|
|
||||||
|
if (config.__retryCount >= maxRetries) {
|
||||||
|
return Promise.reject(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
config.__retryCount += 1;
|
||||||
|
|
||||||
|
// Exponential backoff
|
||||||
|
const delay = retryDelay * Math.pow(2, config.__retryCount - 1);
|
||||||
|
await new Promise(resolve => setTimeout(resolve, delay));
|
||||||
|
|
||||||
|
return axios(config);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Usage
|
||||||
|
const api = axios.create();
|
||||||
|
createRetryInterceptor(3, 1000); // note: this registers on the global axios instance, not on `api`
|
||||||
|
|
||||||
|
// Make request with retry
|
||||||
|
axios.get('/api/data', { retry: true }); // use the global axios, which carries the retry interceptor
|
||||||
|
```
|
||||||
|
|
||||||
|
### Loading State Management
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Loading interceptor for 1.x
|
||||||
|
class LoadingManager {
|
||||||
|
constructor() {
|
||||||
|
this.requests = new Set();
|
||||||
|
this.setupInterceptors();
|
||||||
|
}
|
||||||
|
|
||||||
|
setupInterceptors() {
|
||||||
|
axios.interceptors.request.use(config => {
|
||||||
|
this.requests.add(config);
|
||||||
|
this.updateLoadingState();
|
||||||
|
return config;
|
||||||
|
});
|
||||||
|
|
||||||
|
axios.interceptors.response.use(
|
||||||
|
response => {
|
||||||
|
this.requests.delete(response.config);
|
||||||
|
this.updateLoadingState();
|
||||||
|
return response;
|
||||||
|
},
|
||||||
|
error => {
|
||||||
|
this.requests.delete(error.config);
|
||||||
|
this.updateLoadingState();
|
||||||
|
return Promise.reject(error);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
updateLoadingState() {
|
||||||
|
const isLoading = this.requests.size > 0;
|
||||||
|
// Update your loading UI
|
||||||
|
document.body.classList.toggle('loading', isLoading);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const loadingManager = new LoadingManager();
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Common Migration Issues
|
||||||
|
|
||||||
|
#### Issue 1: Unhandled Promise Rejections
|
||||||
|
|
||||||
|
**Problem:**
|
||||||
|
```javascript
|
||||||
|
// This pattern worked in 0.x but causes unhandled rejections in 1.x
|
||||||
|
axios.get('/api/data'); // No .catch() handler
|
||||||
|
```
|
||||||
|
|
||||||
|
**Solution:**
|
||||||
|
```javascript
|
||||||
|
// Always handle promises
|
||||||
|
axios.get('/api/data')
|
||||||
|
.catch(error => {
|
||||||
|
// Handle error appropriately
|
||||||
|
console.error('Request failed:', error.message);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Or use async/await with try/catch
|
||||||
|
async function fetchData() {
|
||||||
|
try {
|
||||||
|
const response = await axios.get('/api/data');
|
||||||
|
return response.data;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Request failed:', error.message);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Issue 2: Response Interceptors Not "Handling" Errors
|
||||||
|
|
||||||
|
**Problem:**
|
||||||
|
```javascript
|
||||||
|
// 0.x style - interceptor "handled" errors
|
||||||
|
axios.interceptors.response.use(null, error => {
|
||||||
|
showErrorMessage(error.message);
|
||||||
|
// Error was considered "handled"
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
**Solution:**
|
||||||
|
```javascript
|
||||||
|
// 1.x style - explicitly control error propagation
|
||||||
|
axios.interceptors.response.use(
|
||||||
|
response => response,
|
||||||
|
error => {
|
||||||
|
showErrorMessage(error.message);
|
||||||
|
|
||||||
|
// Choose whether to propagate the error
|
||||||
|
if (shouldPropagateError(error)) {
|
||||||
|
return Promise.reject(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return success-like response for "handled" errors
|
||||||
|
return Promise.resolve({
|
||||||
|
data: null,
|
||||||
|
handled: true,
|
||||||
|
error: normalizeError(error)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Issue 3: JSON Parsing Errors
|
||||||
|
|
||||||
|
**Problem:**
|
||||||
|
```javascript
|
||||||
|
// 1.x is stricter about JSON parsing
|
||||||
|
// This might throw where 0.x was lenient
|
||||||
|
const data = response.data;
|
||||||
|
```
|
||||||
|
|
||||||
|
**Solution:**
|
||||||
|
```javascript
|
||||||
|
// Add response transformer for better error handling
|
||||||
|
axios.defaults.transformResponse = [
|
||||||
|
function (data) {
|
||||||
|
if (typeof data === 'string') {
|
||||||
|
try {
|
||||||
|
return JSON.parse(data);
|
||||||
|
} catch (e) {
|
||||||
|
// Handle JSON parsing errors gracefully
|
||||||
|
console.warn('Invalid JSON response:', data);
|
||||||
|
return { error: 'Invalid JSON', rawData: data };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
];
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Issue 4: TypeScript Errors After Upgrade
|
||||||
|
|
||||||
|
**Problem:**
|
||||||
|
```typescript
|
||||||
|
// TypeScript errors after upgrade
|
||||||
|
const response = await axios.get('/api/data');
|
||||||
|
// Property 'someProperty' does not exist on type 'any'
|
||||||
|
```
|
||||||
|
|
||||||
|
**Solution:**
|
||||||
|
```typescript
|
||||||
|
// Define proper interfaces
|
||||||
|
interface ApiResponse {
|
||||||
|
data: any;
|
||||||
|
message: string;
|
||||||
|
success: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await axios.get<ApiResponse>('/api/data');
|
||||||
|
// Now properly typed
|
||||||
|
console.log(response.data.data);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Debug Migration Issues
|
||||||
|
|
||||||
|
#### Enable Debug Logging
|
||||||
|
```javascript
|
||||||
|
// Add request/response logging
|
||||||
|
axios.interceptors.request.use(config => {
|
||||||
|
console.log('Request:', config);
|
||||||
|
return config;
|
||||||
|
});
|
||||||
|
|
||||||
|
axios.interceptors.response.use(
|
||||||
|
response => {
|
||||||
|
console.log('Response:', response);
|
||||||
|
return response;
|
||||||
|
},
|
||||||
|
error => {
|
||||||
|
console.log('Error:', error);
|
||||||
|
return Promise.reject(error);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Compare Behavior
|
||||||
|
```javascript
|
||||||
|
// Create side-by-side comparison during migration
|
||||||
|
const axios0x = require('axios-0x'); // old version installed under an npm alias, e.g. `npm i axios-0x@npm:axios@0.27`
|
||||||
|
const axios1x = require('axios');
|
||||||
|
|
||||||
|
async function compareRequests(config) {
|
||||||
|
try {
|
||||||
|
const [result0x, result1x] = await Promise.allSettled([
|
||||||
|
axios0x(config),
|
||||||
|
axios1x(config)
|
||||||
|
]);
|
||||||
|
|
||||||
|
console.log('0.x result:', result0x);
|
||||||
|
console.log('1.x result:', result1x);
|
||||||
|
} catch (error) {
|
||||||
|
console.log('Comparison error:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Resources
|
||||||
|
|
||||||
|
### Official Documentation
|
||||||
|
- [Axios 1.x Documentation](https://axios-http.com/)
|
||||||
|
- [Axios GitHub Repository](https://github.com/axios/axios)
|
||||||
|
- [Axios Changelog](https://github.com/axios/axios/blob/main/CHANGELOG.md)
|
||||||
|
|
||||||
|
### Migration Tools
|
||||||
|
- [Axios Migration Codemod](https://github.com/axios/axios-migration-codemod) *(if available)*
|
||||||
|
- [ESLint Rules for Axios 1.x](https://github.com/axios/eslint-plugin-axios) *(if available)*
|
||||||
|
|
||||||
|
### Community Resources
|
||||||
|
- [Stack Overflow - Axios Migration Questions](https://stackoverflow.com/questions/tagged/axios+migration)
|
||||||
|
- [GitHub Discussions](https://github.com/axios/axios/discussions)
|
||||||
|
- [Axios Discord Community](https://discord.gg/axios) *(if available)*
|
||||||
|
|
||||||
|
### Related Issues
|
||||||
|
- [Error Handling Changes Discussion](https://github.com/axios/axios/issues/7208)
|
||||||
|
- [Migration Guide Request](https://github.com/axios/axios/issues/xxxx) *(link to related issues)*
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Need Help?
|
||||||
|
|
||||||
|
If you encounter issues during migration that aren't covered in this guide:
|
||||||
|
|
||||||
|
1. **Search existing issues** in the [Axios GitHub repository](https://github.com/axios/axios/issues)
|
||||||
|
2. **Ask questions** in [GitHub Discussions](https://github.com/axios/axios/discussions)
|
||||||
|
3. **Contribute improvements** to this migration guide
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*This migration guide is maintained by the community. If you find errors or have suggestions, please [open an issue](https://github.com/axios/axios/issues) or submit a pull request.*
|
||||||
1921
server/node_modules/axios/README.md
generated
vendored
Normal file
1921
server/node_modules/axios/README.md
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
4353
server/node_modules/axios/dist/axios.js
generated
vendored
Normal file
4353
server/node_modules/axios/dist/axios.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
server/node_modules/axios/dist/axios.js.map
generated
vendored
Normal file
1
server/node_modules/axios/dist/axios.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
5
server/node_modules/axios/dist/axios.min.js
generated
vendored
Normal file
5
server/node_modules/axios/dist/axios.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
server/node_modules/axios/dist/axios.min.js.map
generated
vendored
Normal file
1
server/node_modules/axios/dist/axios.min.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3979
server/node_modules/axios/dist/browser/axios.cjs
generated
vendored
Normal file
3979
server/node_modules/axios/dist/browser/axios.cjs
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
server/node_modules/axios/dist/browser/axios.cjs.map
generated
vendored
Normal file
1
server/node_modules/axios/dist/browser/axios.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4002
server/node_modules/axios/dist/esm/axios.js
generated
vendored
Normal file
4002
server/node_modules/axios/dist/esm/axios.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
server/node_modules/axios/dist/esm/axios.js.map
generated
vendored
Normal file
1
server/node_modules/axios/dist/esm/axios.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
server/node_modules/axios/dist/esm/axios.min.js
generated
vendored
Normal file
3
server/node_modules/axios/dist/esm/axios.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
server/node_modules/axios/dist/esm/axios.min.js.map
generated
vendored
Normal file
1
server/node_modules/axios/dist/esm/axios.min.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
5315
server/node_modules/axios/dist/node/axios.cjs
generated
vendored
Normal file
5315
server/node_modules/axios/dist/node/axios.cjs
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
server/node_modules/axios/dist/node/axios.cjs.map
generated
vendored
Normal file
1
server/node_modules/axios/dist/node/axios.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
591
server/node_modules/axios/index.d.cts
generated
vendored
Normal file
591
server/node_modules/axios/index.d.cts
generated
vendored
Normal file
@@ -0,0 +1,591 @@
|
|||||||
|
interface RawAxiosHeaders {
|
||||||
|
[key: string]: axios.AxiosHeaderValue;
|
||||||
|
}
|
||||||
|
|
||||||
|
type MethodsHeaders = Partial<{
|
||||||
|
[Key in axios.Method as Lowercase<Key>]: AxiosHeaders;
|
||||||
|
} & {common: AxiosHeaders}>;
|
||||||
|
|
||||||
|
type AxiosHeaderMatcher = string | RegExp | ((this: AxiosHeaders, value: string, name: string) => boolean);
|
||||||
|
|
||||||
|
type AxiosHeaderParser = (this: AxiosHeaders, value: axios.AxiosHeaderValue, header: string) => any;
|
||||||
|
|
||||||
|
type CommonRequestHeadersList = 'Accept' | 'Content-Length' | 'User-Agent'| 'Content-Encoding' | 'Authorization';
|
||||||
|
|
||||||
|
type ContentType = axios.AxiosHeaderValue | 'text/html' | 'text/plain' | 'multipart/form-data' | 'application/json' | 'application/x-www-form-urlencoded' | 'application/octet-stream';
|
||||||
|
|
||||||
|
type CommonResponseHeadersList = 'Server' | 'Content-Type' | 'Content-Length' | 'Cache-Control'| 'Content-Encoding';
|
||||||
|
|
||||||
|
type BrowserProgressEvent = any;
|
||||||
|
|
||||||
|
declare class AxiosHeaders {
|
||||||
|
constructor(
|
||||||
|
headers?: RawAxiosHeaders | AxiosHeaders | string
|
||||||
|
);
|
||||||
|
|
||||||
|
[key: string]: any;
|
||||||
|
|
||||||
|
set(headerName?: string, value?: axios.AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
|
||||||
|
set(headers?: RawAxiosHeaders | AxiosHeaders | string, rewrite?: boolean): AxiosHeaders;
|
||||||
|
|
||||||
|
get(headerName: string, parser: RegExp): RegExpExecArray | null;
|
||||||
|
get(headerName: string, matcher?: true | AxiosHeaderParser): axios.AxiosHeaderValue;
|
||||||
|
|
||||||
|
has(header: string, matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
delete(header: string | string[], matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
clear(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
normalize(format: boolean): AxiosHeaders;
|
||||||
|
|
||||||
|
concat(...targets: Array<AxiosHeaders | RawAxiosHeaders | string | undefined | null>): AxiosHeaders;
|
||||||
|
|
||||||
|
toJSON(asStrings?: boolean): RawAxiosHeaders;
|
||||||
|
|
||||||
|
static from(thing?: AxiosHeaders | RawAxiosHeaders | string): AxiosHeaders;
|
||||||
|
|
||||||
|
static accessor(header: string | string[]): AxiosHeaders;
|
||||||
|
|
||||||
|
static concat(...targets: Array<AxiosHeaders | RawAxiosHeaders | string | undefined | null>): AxiosHeaders;
|
||||||
|
|
||||||
|
setContentType(value: ContentType, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
|
||||||
|
getContentType(parser?: RegExp): RegExpExecArray | null;
|
||||||
|
getContentType(matcher?: AxiosHeaderMatcher): axios.AxiosHeaderValue;
|
||||||
|
hasContentType(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
setContentLength(value: axios.AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
|
||||||
|
getContentLength(parser?: RegExp): RegExpExecArray | null;
|
||||||
|
getContentLength(matcher?: AxiosHeaderMatcher): axios.AxiosHeaderValue;
|
||||||
|
hasContentLength(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
setAccept(value: axios.AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
|
||||||
|
getAccept(parser?: RegExp): RegExpExecArray | null;
|
||||||
|
getAccept(matcher?: AxiosHeaderMatcher): axios.AxiosHeaderValue;
|
||||||
|
hasAccept(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
setUserAgent(value: axios.AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
|
||||||
|
getUserAgent(parser?: RegExp): RegExpExecArray | null;
|
||||||
|
getUserAgent(matcher?: AxiosHeaderMatcher): axios.AxiosHeaderValue;
|
||||||
|
hasUserAgent(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
setContentEncoding(value: axios.AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
|
||||||
|
getContentEncoding(parser?: RegExp): RegExpExecArray | null;
|
||||||
|
getContentEncoding(matcher?: AxiosHeaderMatcher): axios.AxiosHeaderValue;
|
||||||
|
hasContentEncoding(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
setAuthorization(value: axios.AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
|
||||||
|
getAuthorization(parser?: RegExp): RegExpExecArray | null;
|
||||||
|
getAuthorization(matcher?: AxiosHeaderMatcher): axios.AxiosHeaderValue;
|
||||||
|
hasAuthorization(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
getSetCookie(): string[];
|
||||||
|
|
||||||
|
[Symbol.iterator](): IterableIterator<[string, axios.AxiosHeaderValue]>;
|
||||||
|
}
|
||||||
|
|
||||||
|
declare class AxiosError<T = unknown, D = any> extends Error {
|
||||||
|
constructor(
|
||||||
|
message?: string,
|
||||||
|
code?: string,
|
||||||
|
config?: axios.InternalAxiosRequestConfig<D>,
|
||||||
|
request?: any,
|
||||||
|
response?: axios.AxiosResponse<T, D>
|
||||||
|
);
|
||||||
|
|
||||||
|
config?: axios.InternalAxiosRequestConfig<D>;
|
||||||
|
code?: string;
|
||||||
|
request?: any;
|
||||||
|
response?: axios.AxiosResponse<T, D>;
|
||||||
|
isAxiosError: boolean;
|
||||||
|
status?: number;
|
||||||
|
toJSON: () => object;
|
||||||
|
cause?: Error;
|
||||||
|
event?: BrowserProgressEvent;
|
||||||
|
static from<T = unknown, D = any>(
|
||||||
|
error: Error | unknown,
|
||||||
|
code?: string,
|
||||||
|
config?: axios.InternalAxiosRequestConfig<D>,
|
||||||
|
request?: any,
|
||||||
|
response?: axios.AxiosResponse<T, D>,
|
||||||
|
customProps?: object,
|
||||||
|
): AxiosError<T, D>;
|
||||||
|
static readonly ERR_FR_TOO_MANY_REDIRECTS = "ERR_FR_TOO_MANY_REDIRECTS";
|
||||||
|
static readonly ERR_BAD_OPTION_VALUE = "ERR_BAD_OPTION_VALUE";
|
||||||
|
static readonly ERR_BAD_OPTION = "ERR_BAD_OPTION";
|
||||||
|
static readonly ERR_NETWORK = "ERR_NETWORK";
|
||||||
|
static readonly ERR_DEPRECATED = "ERR_DEPRECATED";
|
||||||
|
static readonly ERR_BAD_RESPONSE = "ERR_BAD_RESPONSE";
|
||||||
|
static readonly ERR_BAD_REQUEST = "ERR_BAD_REQUEST";
|
||||||
|
static readonly ERR_NOT_SUPPORT = "ERR_NOT_SUPPORT";
|
||||||
|
static readonly ERR_INVALID_URL = "ERR_INVALID_URL";
|
||||||
|
static readonly ERR_CANCELED = "ERR_CANCELED";
|
||||||
|
static readonly ECONNABORTED = "ECONNABORTED";
|
||||||
|
static readonly ETIMEDOUT = "ETIMEDOUT";
|
||||||
|
}
|
||||||
|
|
||||||
|
declare class CanceledError<T> extends AxiosError<T> {
|
||||||
|
}
|
||||||
|
|
||||||
|
declare class Axios {
|
||||||
|
constructor(config?: axios.AxiosRequestConfig);
|
||||||
|
defaults: axios.AxiosDefaults;
|
||||||
|
interceptors: {
|
||||||
|
request: axios.AxiosInterceptorManager<axios.InternalAxiosRequestConfig>;
|
||||||
|
response: axios.AxiosInterceptorManager<axios.AxiosResponse>;
|
||||||
|
};
|
||||||
|
getUri(config?: axios.AxiosRequestConfig): string;
|
||||||
|
request<T = any, R = axios.AxiosResponse<T>, D = any>(config: axios.AxiosRequestConfig<D>): Promise<R>;
|
||||||
|
get<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, config?: axios.AxiosRequestConfig<D>): Promise<R>;
|
||||||
|
delete<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, config?: axios.AxiosRequestConfig<D>): Promise<R>;
|
||||||
|
head<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, config?: axios.AxiosRequestConfig<D>): Promise<R>;
|
||||||
|
options<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, config?: axios.AxiosRequestConfig<D>): Promise<R>;
|
||||||
|
post<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, data?: D, config?: axios.AxiosRequestConfig<D>): Promise<R>;
|
||||||
|
put<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, data?: D, config?: axios.AxiosRequestConfig<D>): Promise<R>;
|
||||||
|
patch<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, data?: D, config?: axios.AxiosRequestConfig<D>): Promise<R>;
|
||||||
|
postForm<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, data?: D, config?: axios.AxiosRequestConfig<D>): Promise<R>;
|
||||||
|
putForm<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, data?: D, config?: axios.AxiosRequestConfig<D>): Promise<R>;
|
||||||
|
patchForm<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, data?: D, config?: axios.AxiosRequestConfig<D>): Promise<R>;
|
||||||
|
}
|
||||||
|
|
||||||
|
declare enum HttpStatusCode {
|
||||||
|
Continue = 100,
|
||||||
|
SwitchingProtocols = 101,
|
||||||
|
Processing = 102,
|
||||||
|
EarlyHints = 103,
|
||||||
|
Ok = 200,
|
||||||
|
Created = 201,
|
||||||
|
Accepted = 202,
|
||||||
|
NonAuthoritativeInformation = 203,
|
||||||
|
NoContent = 204,
|
||||||
|
ResetContent = 205,
|
||||||
|
PartialContent = 206,
|
||||||
|
MultiStatus = 207,
|
||||||
|
AlreadyReported = 208,
|
||||||
|
ImUsed = 226,
|
||||||
|
MultipleChoices = 300,
|
||||||
|
MovedPermanently = 301,
|
||||||
|
Found = 302,
|
||||||
|
SeeOther = 303,
|
||||||
|
NotModified = 304,
|
||||||
|
UseProxy = 305,
|
||||||
|
Unused = 306,
|
||||||
|
TemporaryRedirect = 307,
|
||||||
|
PermanentRedirect = 308,
|
||||||
|
BadRequest = 400,
|
||||||
|
Unauthorized = 401,
|
||||||
|
PaymentRequired = 402,
|
||||||
|
Forbidden = 403,
|
||||||
|
NotFound = 404,
|
||||||
|
MethodNotAllowed = 405,
|
||||||
|
NotAcceptable = 406,
|
||||||
|
ProxyAuthenticationRequired = 407,
|
||||||
|
RequestTimeout = 408,
|
||||||
|
Conflict = 409,
|
||||||
|
Gone = 410,
|
||||||
|
LengthRequired = 411,
|
||||||
|
PreconditionFailed = 412,
|
||||||
|
PayloadTooLarge = 413,
|
||||||
|
UriTooLong = 414,
|
||||||
|
UnsupportedMediaType = 415,
|
||||||
|
RangeNotSatisfiable = 416,
|
||||||
|
ExpectationFailed = 417,
|
||||||
|
ImATeapot = 418,
|
||||||
|
MisdirectedRequest = 421,
|
||||||
|
UnprocessableEntity = 422,
|
||||||
|
Locked = 423,
|
||||||
|
FailedDependency = 424,
|
||||||
|
TooEarly = 425,
|
||||||
|
UpgradeRequired = 426,
|
||||||
|
PreconditionRequired = 428,
|
||||||
|
TooManyRequests = 429,
|
||||||
|
RequestHeaderFieldsTooLarge = 431,
|
||||||
|
UnavailableForLegalReasons = 451,
|
||||||
|
InternalServerError = 500,
|
||||||
|
NotImplemented = 501,
|
||||||
|
BadGateway = 502,
|
||||||
|
ServiceUnavailable = 503,
|
||||||
|
GatewayTimeout = 504,
|
||||||
|
HttpVersionNotSupported = 505,
|
||||||
|
VariantAlsoNegotiates = 506,
|
||||||
|
InsufficientStorage = 507,
|
||||||
|
LoopDetected = 508,
|
||||||
|
NotExtended = 510,
|
||||||
|
NetworkAuthenticationRequired = 511,
|
||||||
|
}
|
||||||
|
|
||||||
|
type InternalAxiosError<T = unknown, D = any> = AxiosError<T, D>;
|
||||||
|
|
||||||
|
declare namespace axios {
|
||||||
|
type AxiosError<T = unknown, D = any> = InternalAxiosError<T, D>;
|
||||||
|
|
||||||
|
type RawAxiosRequestHeaders = Partial<RawAxiosHeaders & {
|
||||||
|
[Key in CommonRequestHeadersList]: AxiosHeaderValue;
|
||||||
|
} & {
|
||||||
|
'Content-Type': ContentType
|
||||||
|
}>;
|
||||||
|
|
||||||
|
type AxiosRequestHeaders = RawAxiosRequestHeaders & AxiosHeaders;
|
||||||
|
|
||||||
|
type AxiosHeaderValue = AxiosHeaders | string | string[] | number | boolean | null;
|
||||||
|
|
||||||
|
type RawCommonResponseHeaders = {
|
||||||
|
[Key in CommonResponseHeadersList]: AxiosHeaderValue;
|
||||||
|
} & {
|
||||||
|
"set-cookie": string[];
|
||||||
|
};
|
||||||
|
|
||||||
|
type RawAxiosResponseHeaders = Partial<RawAxiosHeaders & RawCommonResponseHeaders>;
|
||||||
|
|
||||||
|
type AxiosResponseHeaders = RawAxiosResponseHeaders & AxiosHeaders;
|
||||||
|
|
||||||
|
interface AxiosRequestTransformer {
|
||||||
|
(this: InternalAxiosRequestConfig, data: any, headers: AxiosRequestHeaders): any;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface AxiosResponseTransformer {
|
||||||
|
(this: InternalAxiosRequestConfig, data: any, headers: AxiosResponseHeaders, status?: number): any;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface AxiosAdapter {
|
||||||
|
(config: InternalAxiosRequestConfig): AxiosPromise;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface AxiosBasicCredentials {
|
||||||
|
username: string;
|
||||||
|
password: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface AxiosProxyConfig {
|
||||||
|
host: string;
|
||||||
|
port: number;
|
||||||
|
auth?: AxiosBasicCredentials;
|
||||||
|
protocol?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
type Method =
|
||||||
|
| 'get' | 'GET'
|
||||||
|
| 'delete' | 'DELETE'
|
||||||
|
| 'head' | 'HEAD'
|
||||||
|
| 'options' | 'OPTIONS'
|
||||||
|
| 'post' | 'POST'
|
||||||
|
| 'put' | 'PUT'
|
||||||
|
| 'patch' | 'PATCH'
|
||||||
|
| 'purge' | 'PURGE'
|
||||||
|
| 'link' | 'LINK'
|
||||||
|
| 'unlink' | 'UNLINK';
|
||||||
|
|
||||||
|
type ResponseType =
|
||||||
|
| 'arraybuffer'
|
||||||
|
| 'blob'
|
||||||
|
| 'document'
|
||||||
|
| 'json'
|
||||||
|
| 'text'
|
||||||
|
| 'stream'
|
||||||
|
| 'formdata';
|
||||||
|
|
||||||
|
type responseEncoding =
|
||||||
|
| 'ascii' | 'ASCII'
|
||||||
|
| 'ansi' | 'ANSI'
|
||||||
|
| 'binary' | 'BINARY'
|
||||||
|
| 'base64' | 'BASE64'
|
||||||
|
| 'base64url' | 'BASE64URL'
|
||||||
|
| 'hex' | 'HEX'
|
||||||
|
| 'latin1' | 'LATIN1'
|
||||||
|
| 'ucs-2' | 'UCS-2'
|
||||||
|
| 'ucs2' | 'UCS2'
|
||||||
|
| 'utf-8' | 'UTF-8'
|
||||||
|
| 'utf8' | 'UTF8'
|
||||||
|
| 'utf16le' | 'UTF16LE';
|
||||||
|
|
||||||
|
interface TransitionalOptions {
|
||||||
|
silentJSONParsing?: boolean;
|
||||||
|
forcedJSONParsing?: boolean;
|
||||||
|
clarifyTimeoutError?: boolean;
|
||||||
|
legacyInterceptorReqResOrdering?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GenericAbortSignal {
|
||||||
|
readonly aborted: boolean;
|
||||||
|
onabort?: ((...args: any) => any) | null;
|
||||||
|
addEventListener?: (...args: any) => any;
|
||||||
|
removeEventListener?: (...args: any) => any;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface FormDataVisitorHelpers {
|
||||||
|
defaultVisitor: SerializerVisitor;
|
||||||
|
convertValue: (value: any) => any;
|
||||||
|
isVisitable: (value: any) => boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface SerializerVisitor {
|
||||||
|
(
|
||||||
|
this: GenericFormData,
|
||||||
|
value: any,
|
||||||
|
key: string | number,
|
||||||
|
path: null | Array<string | number>,
|
||||||
|
helpers: FormDataVisitorHelpers
|
||||||
|
): boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface SerializerOptions {
|
||||||
|
visitor?: SerializerVisitor;
|
||||||
|
dots?: boolean;
|
||||||
|
metaTokens?: boolean;
|
||||||
|
indexes?: boolean | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// tslint:disable-next-line
|
||||||
|
interface FormSerializerOptions extends SerializerOptions {
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ParamEncoder {
|
||||||
|
(value: any, defaultEncoder: (value: any) => any): any;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface CustomParamsSerializer {
|
||||||
|
(params: Record<string, any>, options?: ParamsSerializerOptions): string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ParamsSerializerOptions extends SerializerOptions {
|
||||||
|
encode?: ParamEncoder;
|
||||||
|
serialize?: CustomParamsSerializer;
|
||||||
|
}
|
||||||
|
|
||||||
|
type MaxUploadRate = number;
|
||||||
|
|
||||||
|
type MaxDownloadRate = number;
|
||||||
|
|
||||||
|
interface AxiosProgressEvent {
|
||||||
|
loaded: number;
|
||||||
|
total?: number;
|
||||||
|
progress?: number;
|
||||||
|
bytes: number;
|
||||||
|
rate?: number;
|
||||||
|
estimated?: number;
|
||||||
|
upload?: boolean;
|
||||||
|
download?: boolean;
|
||||||
|
event?: BrowserProgressEvent;
|
||||||
|
lengthComputable: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
type Milliseconds = number;
|
||||||
|
|
||||||
|
type AxiosAdapterName = 'fetch' | 'xhr' | 'http' | (string & {});
|
||||||
|
|
||||||
|
type AxiosAdapterConfig = AxiosAdapter | AxiosAdapterName;
|
||||||
|
|
||||||
|
type AddressFamily = 4 | 6 | undefined;
|
||||||
|
|
||||||
|
interface LookupAddressEntry {
|
||||||
|
address: string;
|
||||||
|
family?: AddressFamily;
|
||||||
|
}
|
||||||
|
|
||||||
|
type LookupAddress = string | LookupAddressEntry;
|
||||||
|
|
||||||
|
interface AxiosRequestConfig<D = any> {
|
||||||
|
url?: string;
|
||||||
|
method?: Method | string;
|
||||||
|
baseURL?: string;
|
||||||
|
allowAbsoluteUrls?: boolean;
|
||||||
|
transformRequest?: AxiosRequestTransformer | AxiosRequestTransformer[];
|
||||||
|
transformResponse?: AxiosResponseTransformer | AxiosResponseTransformer[];
|
||||||
|
headers?: (RawAxiosRequestHeaders & MethodsHeaders) | AxiosHeaders;
|
||||||
|
params?: any;
|
||||||
|
paramsSerializer?: ParamsSerializerOptions | CustomParamsSerializer;
|
||||||
|
data?: D;
|
||||||
|
timeout?: Milliseconds;
|
||||||
|
timeoutErrorMessage?: string;
|
||||||
|
withCredentials?: boolean;
|
||||||
|
adapter?: AxiosAdapterConfig | AxiosAdapterConfig[];
|
||||||
|
auth?: AxiosBasicCredentials;
|
||||||
|
responseType?: ResponseType;
|
||||||
|
responseEncoding?: responseEncoding | string;
|
||||||
|
xsrfCookieName?: string;
|
||||||
|
xsrfHeaderName?: string;
|
||||||
|
onUploadProgress?: (progressEvent: AxiosProgressEvent) => void;
|
||||||
|
onDownloadProgress?: (progressEvent: AxiosProgressEvent) => void;
|
||||||
|
maxContentLength?: number;
|
||||||
|
validateStatus?: ((status: number) => boolean) | null;
|
||||||
|
maxBodyLength?: number;
|
||||||
|
maxRedirects?: number;
|
||||||
|
maxRate?: number | [MaxUploadRate, MaxDownloadRate];
|
||||||
|
beforeRedirect?: (options: Record<string, any>, responseDetails: {headers: Record<string, string>, statusCode: HttpStatusCode}) => void;
|
||||||
|
socketPath?: string | null;
|
||||||
|
transport?: any;
|
||||||
|
httpAgent?: any;
|
||||||
|
httpsAgent?: any;
|
||||||
|
proxy?: AxiosProxyConfig | false;
|
||||||
|
cancelToken?: CancelToken;
|
||||||
|
decompress?: boolean;
|
||||||
|
transitional?: TransitionalOptions;
|
||||||
|
signal?: GenericAbortSignal;
|
||||||
|
insecureHTTPParser?: boolean;
|
||||||
|
env?: {
|
||||||
|
FormData?: new (...args: any[]) => object;
|
||||||
|
fetch?: (input: URL | Request | string, init?: RequestInit) => Promise<Response>;
|
||||||
|
Request?: new (input: URL | Request | string, init?: RequestInit) => Request;
|
||||||
|
Response?: new (
|
||||||
|
body?: ArrayBuffer | ArrayBufferView | Blob | FormData | URLSearchParams | string | null,
|
||||||
|
init?: ResponseInit
|
||||||
|
) => Response;
|
||||||
|
};
|
||||||
|
formSerializer?: FormSerializerOptions;
|
||||||
|
family?: AddressFamily;
|
||||||
|
lookup?: ((hostname: string, options: object, cb: (err: Error | null, address: LookupAddress | LookupAddress[], family?: AddressFamily) => void) => void) |
|
||||||
|
((hostname: string, options: object) => Promise<[address: LookupAddressEntry | LookupAddressEntry[], family?: AddressFamily] | LookupAddress>);
|
||||||
|
withXSRFToken?: boolean | ((config: InternalAxiosRequestConfig) => boolean | undefined);
|
||||||
|
fetchOptions?: Omit<RequestInit, 'body' | 'headers' | 'method' | 'signal'> | Record<string, any>;
|
||||||
|
httpVersion?: 1 | 2;
|
||||||
|
http2Options?: Record<string, any> & {
|
||||||
|
sessionTimeout?: number;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Alias
|
||||||
|
type RawAxiosRequestConfig<D = any> = AxiosRequestConfig<D>;
|
||||||
|
|
||||||
|
interface InternalAxiosRequestConfig<D = any> extends AxiosRequestConfig<D> {
|
||||||
|
headers: AxiosRequestHeaders;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface HeadersDefaults {
|
||||||
|
common: RawAxiosRequestHeaders;
|
||||||
|
delete: RawAxiosRequestHeaders;
|
||||||
|
get: RawAxiosRequestHeaders;
|
||||||
|
head: RawAxiosRequestHeaders;
|
||||||
|
post: RawAxiosRequestHeaders;
|
||||||
|
put: RawAxiosRequestHeaders;
|
||||||
|
patch: RawAxiosRequestHeaders;
|
||||||
|
options?: RawAxiosRequestHeaders;
|
||||||
|
purge?: RawAxiosRequestHeaders;
|
||||||
|
link?: RawAxiosRequestHeaders;
|
||||||
|
unlink?: RawAxiosRequestHeaders;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface AxiosDefaults<D = any> extends Omit<AxiosRequestConfig<D>, 'headers'> {
|
||||||
|
headers: HeadersDefaults;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface CreateAxiosDefaults<D = any> extends Omit<AxiosRequestConfig<D>, 'headers'> {
|
||||||
|
headers?: RawAxiosRequestHeaders | AxiosHeaders | Partial<HeadersDefaults>;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface AxiosResponse<T = any, D = any, H = {}> {
|
||||||
|
data: T;
|
||||||
|
status: number;
|
||||||
|
statusText: string;
|
||||||
|
headers: H & RawAxiosResponseHeaders | AxiosResponseHeaders;
|
||||||
|
config: InternalAxiosRequestConfig<D>;
|
||||||
|
request?: any;
|
||||||
|
}
|
||||||
|
|
||||||
|
type AxiosPromise<T = any> = Promise<AxiosResponse<T>>;
|
||||||
|
|
||||||
|
interface CancelStatic {
|
||||||
|
new (message?: string): Cancel;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Cancel {
|
||||||
|
message: string | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Canceler {
|
||||||
|
(message?: string, config?: AxiosRequestConfig, request?: any): void;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface CancelTokenStatic {
|
||||||
|
new (executor: (cancel: Canceler) => void): CancelToken;
|
||||||
|
source(): CancelTokenSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface CancelToken {
|
||||||
|
promise: Promise<Cancel>;
|
||||||
|
reason?: Cancel;
|
||||||
|
throwIfRequested(): void;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface CancelTokenSource {
|
||||||
|
token: CancelToken;
|
||||||
|
cancel: Canceler;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface AxiosInterceptorOptions {
|
||||||
|
synchronous?: boolean;
|
||||||
|
runWhen?: (config: InternalAxiosRequestConfig) => boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
type AxiosInterceptorFulfilled<T> = (value: T) => T | Promise<T>;
|
||||||
|
type AxiosInterceptorRejected = (error: any) => any;
|
||||||
|
|
||||||
|
type AxiosRequestInterceptorUse<T> = (
|
||||||
|
onFulfilled?: AxiosInterceptorFulfilled<T> | null,
|
||||||
|
onRejected?: AxiosInterceptorRejected | null,
|
||||||
|
options?: AxiosInterceptorOptions
|
||||||
|
) => number;
|
||||||
|
|
||||||
|
type AxiosResponseInterceptorUse<T> = (
|
||||||
|
onFulfilled?: AxiosInterceptorFulfilled<T> | null,
|
||||||
|
onRejected?: AxiosInterceptorRejected | null
|
||||||
|
) => number;
|
||||||
|
|
||||||
|
interface AxiosInterceptorHandler<T> {
|
||||||
|
fulfilled: AxiosInterceptorFulfilled<T>;
|
||||||
|
rejected?: AxiosInterceptorRejected;
|
||||||
|
synchronous: boolean;
|
||||||
|
runWhen?: (config: AxiosRequestConfig) => boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface AxiosInterceptorManager<V> {
|
||||||
|
use: V extends AxiosResponse ? AxiosResponseInterceptorUse<V> : AxiosRequestInterceptorUse<V>;
|
||||||
|
eject(id: number): void;
|
||||||
|
clear(): void;
|
||||||
|
handlers?: Array<AxiosInterceptorHandler<V>>;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface AxiosInstance extends Axios {
|
||||||
|
<T = any, R = AxiosResponse<T>, D = any>(config: AxiosRequestConfig<D>): Promise<R>;
|
||||||
|
<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
|
||||||
|
|
||||||
|
create(config?: CreateAxiosDefaults): AxiosInstance;
|
||||||
|
defaults: Omit<AxiosDefaults, 'headers'> & {
|
||||||
|
headers: HeadersDefaults & {
|
||||||
|
[key: string]: AxiosHeaderValue
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GenericFormData {
|
||||||
|
append(name: string, value: any, options?: any): any;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GenericHTMLFormElement {
|
||||||
|
name: string;
|
||||||
|
method: string;
|
||||||
|
submit(): void;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface AxiosStatic extends AxiosInstance {
|
||||||
|
Cancel: CancelStatic;
|
||||||
|
CancelToken: CancelTokenStatic;
|
||||||
|
Axios: typeof Axios;
|
||||||
|
AxiosError: typeof AxiosError;
|
||||||
|
CanceledError: typeof CanceledError;
|
||||||
|
HttpStatusCode: typeof HttpStatusCode;
|
||||||
|
readonly VERSION: string;
|
||||||
|
isCancel(value: any): value is Cancel;
|
||||||
|
all<T>(values: Array<T | Promise<T>>): Promise<T[]>;
|
||||||
|
spread<T, R>(callback: (...args: T[]) => R): (array: T[]) => R;
|
||||||
|
isAxiosError<T = any, D = any>(payload: any): payload is AxiosError<T, D>;
|
||||||
|
toFormData(sourceObj: object, targetFormData?: GenericFormData, options?: FormSerializerOptions): GenericFormData;
|
||||||
|
formToJSON(form: GenericFormData|GenericHTMLFormElement): object;
|
||||||
|
getAdapter(adapters: AxiosAdapterConfig | AxiosAdapterConfig[] | undefined): AxiosAdapter;
|
||||||
|
AxiosHeaders: typeof AxiosHeaders;
|
||||||
|
mergeConfig<D = any>(config1: AxiosRequestConfig<D>, config2: AxiosRequestConfig<D>): AxiosRequestConfig<D>;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
declare const axios: axios.AxiosStatic;
|
||||||
|
|
||||||
|
export = axios;
|
||||||
810
server/node_modules/axios/index.d.ts
generated
vendored
Normal file
810
server/node_modules/axios/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,810 @@
|
|||||||
|
// TypeScript Version: 4.7
|
||||||
|
type StringLiteralsOrString<Literals extends string> = Literals | (string & {});
|
||||||
|
|
||||||
|
export type AxiosHeaderValue =
|
||||||
|
| AxiosHeaders
|
||||||
|
| string
|
||||||
|
| string[]
|
||||||
|
| number
|
||||||
|
| boolean
|
||||||
|
| null;
|
||||||
|
|
||||||
|
interface RawAxiosHeaders {
|
||||||
|
[key: string]: AxiosHeaderValue;
|
||||||
|
}
|
||||||
|
|
||||||
|
type MethodsHeaders = Partial<
|
||||||
|
{
|
||||||
|
[Key in Method as Lowercase<Key>]: AxiosHeaders;
|
||||||
|
} & { common: AxiosHeaders }
|
||||||
|
>;
|
||||||
|
|
||||||
|
type AxiosHeaderMatcher =
|
||||||
|
| string
|
||||||
|
| RegExp
|
||||||
|
| ((this: AxiosHeaders, value: string, name: string) => boolean);
|
||||||
|
|
||||||
|
type AxiosHeaderParser = (
|
||||||
|
this: AxiosHeaders,
|
||||||
|
value: AxiosHeaderValue,
|
||||||
|
header: string,
|
||||||
|
) => any;
|
||||||
|
|
||||||
|
export class AxiosHeaders {
|
||||||
|
constructor(headers?: RawAxiosHeaders | AxiosHeaders | string);
|
||||||
|
|
||||||
|
[key: string]: any;
|
||||||
|
|
||||||
|
set(
|
||||||
|
headerName?: string,
|
||||||
|
value?: AxiosHeaderValue,
|
||||||
|
rewrite?: boolean | AxiosHeaderMatcher,
|
||||||
|
): AxiosHeaders;
|
||||||
|
set(
|
||||||
|
headers?: RawAxiosHeaders | AxiosHeaders | string,
|
||||||
|
rewrite?: boolean,
|
||||||
|
): AxiosHeaders;
|
||||||
|
|
||||||
|
get(headerName: string, parser: RegExp): RegExpExecArray | null;
|
||||||
|
get(headerName: string, matcher?: true | AxiosHeaderParser): AxiosHeaderValue;
|
||||||
|
|
||||||
|
has(header: string, matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
delete(header: string | string[], matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
clear(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
normalize(format: boolean): AxiosHeaders;
|
||||||
|
|
||||||
|
concat(
|
||||||
|
...targets: Array<
|
||||||
|
AxiosHeaders | RawAxiosHeaders | string | undefined | null
|
||||||
|
>
|
||||||
|
): AxiosHeaders;
|
||||||
|
|
||||||
|
toJSON(asStrings?: boolean): RawAxiosHeaders;
|
||||||
|
|
||||||
|
static from(thing?: AxiosHeaders | RawAxiosHeaders | string): AxiosHeaders;
|
||||||
|
|
||||||
|
static accessor(header: string | string[]): AxiosHeaders;
|
||||||
|
|
||||||
|
static concat(
|
||||||
|
...targets: Array<
|
||||||
|
AxiosHeaders | RawAxiosHeaders | string | undefined | null
|
||||||
|
>
|
||||||
|
): AxiosHeaders;
|
||||||
|
|
||||||
|
setContentType(
|
||||||
|
value: ContentType,
|
||||||
|
rewrite?: boolean | AxiosHeaderMatcher,
|
||||||
|
): AxiosHeaders;
|
||||||
|
getContentType(parser?: RegExp): RegExpExecArray | null;
|
||||||
|
getContentType(matcher?: AxiosHeaderMatcher): AxiosHeaderValue;
|
||||||
|
hasContentType(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
setContentLength(
|
||||||
|
value: AxiosHeaderValue,
|
||||||
|
rewrite?: boolean | AxiosHeaderMatcher,
|
||||||
|
): AxiosHeaders;
|
||||||
|
getContentLength(parser?: RegExp): RegExpExecArray | null;
|
||||||
|
getContentLength(matcher?: AxiosHeaderMatcher): AxiosHeaderValue;
|
||||||
|
hasContentLength(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
setAccept(
|
||||||
|
value: AxiosHeaderValue,
|
||||||
|
rewrite?: boolean | AxiosHeaderMatcher,
|
||||||
|
): AxiosHeaders;
|
||||||
|
getAccept(parser?: RegExp): RegExpExecArray | null;
|
||||||
|
getAccept(matcher?: AxiosHeaderMatcher): AxiosHeaderValue;
|
||||||
|
hasAccept(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
setUserAgent(
|
||||||
|
value: AxiosHeaderValue,
|
||||||
|
rewrite?: boolean | AxiosHeaderMatcher,
|
||||||
|
): AxiosHeaders;
|
||||||
|
getUserAgent(parser?: RegExp): RegExpExecArray | null;
|
||||||
|
getUserAgent(matcher?: AxiosHeaderMatcher): AxiosHeaderValue;
|
||||||
|
hasUserAgent(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
setContentEncoding(
|
||||||
|
value: AxiosHeaderValue,
|
||||||
|
rewrite?: boolean | AxiosHeaderMatcher,
|
||||||
|
): AxiosHeaders;
|
||||||
|
getContentEncoding(parser?: RegExp): RegExpExecArray | null;
|
||||||
|
getContentEncoding(matcher?: AxiosHeaderMatcher): AxiosHeaderValue;
|
||||||
|
hasContentEncoding(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
setAuthorization(
|
||||||
|
value: AxiosHeaderValue,
|
||||||
|
rewrite?: boolean | AxiosHeaderMatcher,
|
||||||
|
): AxiosHeaders;
|
||||||
|
getAuthorization(parser?: RegExp): RegExpExecArray | null;
|
||||||
|
getAuthorization(matcher?: AxiosHeaderMatcher): AxiosHeaderValue;
|
||||||
|
hasAuthorization(matcher?: AxiosHeaderMatcher): boolean;
|
||||||
|
|
||||||
|
getSetCookie(): string[];
|
||||||
|
|
||||||
|
[Symbol.iterator](): IterableIterator<[string, AxiosHeaderValue]>;
|
||||||
|
}
|
||||||
|
|
||||||
|
type CommonRequestHeadersList =
|
||||||
|
| "Accept"
|
||||||
|
| "Content-Length"
|
||||||
|
| "User-Agent"
|
||||||
|
| "Content-Encoding"
|
||||||
|
| "Authorization";
|
||||||
|
|
||||||
|
type ContentType =
|
||||||
|
| AxiosHeaderValue
|
||||||
|
| "text/html"
|
||||||
|
| "text/plain"
|
||||||
|
| "multipart/form-data"
|
||||||
|
| "application/json"
|
||||||
|
| "application/x-www-form-urlencoded"
|
||||||
|
| "application/octet-stream";
|
||||||
|
|
||||||
|
export type RawAxiosRequestHeaders = Partial<
|
||||||
|
RawAxiosHeaders & {
|
||||||
|
[Key in CommonRequestHeadersList]: AxiosHeaderValue;
|
||||||
|
} & {
|
||||||
|
"Content-Type": ContentType;
|
||||||
|
}
|
||||||
|
>;
|
||||||
|
|
||||||
|
export type AxiosRequestHeaders = RawAxiosRequestHeaders & AxiosHeaders;
|
||||||
|
|
||||||
|
type CommonResponseHeadersList =
|
||||||
|
| "Server"
|
||||||
|
| "Content-Type"
|
||||||
|
| "Content-Length"
|
||||||
|
| "Cache-Control"
|
||||||
|
| "Content-Encoding";
|
||||||
|
|
||||||
|
type RawCommonResponseHeaders = {
|
||||||
|
[Key in CommonResponseHeadersList]: AxiosHeaderValue;
|
||||||
|
} & {
|
||||||
|
"set-cookie": string[];
|
||||||
|
};
|
||||||
|
|
||||||
|
export type RawAxiosResponseHeaders = Partial<
|
||||||
|
RawAxiosHeaders & RawCommonResponseHeaders
|
||||||
|
>;
|
||||||
|
|
||||||
|
export type AxiosResponseHeaders = RawAxiosResponseHeaders & AxiosHeaders;
|
||||||
|
|
||||||
|
export interface AxiosRequestTransformer {
|
||||||
|
(
|
||||||
|
this: InternalAxiosRequestConfig,
|
||||||
|
data: any,
|
||||||
|
headers: AxiosRequestHeaders,
|
||||||
|
): any;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AxiosResponseTransformer {
|
||||||
|
(
|
||||||
|
this: InternalAxiosRequestConfig,
|
||||||
|
data: any,
|
||||||
|
headers: AxiosResponseHeaders,
|
||||||
|
status?: number,
|
||||||
|
): any;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AxiosAdapter {
|
||||||
|
(config: InternalAxiosRequestConfig): AxiosPromise;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AxiosBasicCredentials {
|
||||||
|
username: string;
|
||||||
|
password: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AxiosProxyConfig {
|
||||||
|
host: string;
|
||||||
|
port: number;
|
||||||
|
auth?: AxiosBasicCredentials;
|
||||||
|
protocol?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum HttpStatusCode {
|
||||||
|
Continue = 100,
|
||||||
|
SwitchingProtocols = 101,
|
||||||
|
Processing = 102,
|
||||||
|
EarlyHints = 103,
|
||||||
|
Ok = 200,
|
||||||
|
Created = 201,
|
||||||
|
Accepted = 202,
|
||||||
|
NonAuthoritativeInformation = 203,
|
||||||
|
NoContent = 204,
|
||||||
|
ResetContent = 205,
|
||||||
|
PartialContent = 206,
|
||||||
|
MultiStatus = 207,
|
||||||
|
AlreadyReported = 208,
|
||||||
|
ImUsed = 226,
|
||||||
|
MultipleChoices = 300,
|
||||||
|
MovedPermanently = 301,
|
||||||
|
Found = 302,
|
||||||
|
SeeOther = 303,
|
||||||
|
NotModified = 304,
|
||||||
|
UseProxy = 305,
|
||||||
|
Unused = 306,
|
||||||
|
TemporaryRedirect = 307,
|
||||||
|
PermanentRedirect = 308,
|
||||||
|
BadRequest = 400,
|
||||||
|
Unauthorized = 401,
|
||||||
|
PaymentRequired = 402,
|
||||||
|
Forbidden = 403,
|
||||||
|
NotFound = 404,
|
||||||
|
MethodNotAllowed = 405,
|
||||||
|
NotAcceptable = 406,
|
||||||
|
ProxyAuthenticationRequired = 407,
|
||||||
|
RequestTimeout = 408,
|
||||||
|
Conflict = 409,
|
||||||
|
Gone = 410,
|
||||||
|
LengthRequired = 411,
|
||||||
|
PreconditionFailed = 412,
|
||||||
|
PayloadTooLarge = 413,
|
||||||
|
UriTooLong = 414,
|
||||||
|
UnsupportedMediaType = 415,
|
||||||
|
RangeNotSatisfiable = 416,
|
||||||
|
ExpectationFailed = 417,
|
||||||
|
ImATeapot = 418,
|
||||||
|
MisdirectedRequest = 421,
|
||||||
|
UnprocessableEntity = 422,
|
||||||
|
Locked = 423,
|
||||||
|
FailedDependency = 424,
|
||||||
|
TooEarly = 425,
|
||||||
|
UpgradeRequired = 426,
|
||||||
|
PreconditionRequired = 428,
|
||||||
|
TooManyRequests = 429,
|
||||||
|
RequestHeaderFieldsTooLarge = 431,
|
||||||
|
UnavailableForLegalReasons = 451,
|
||||||
|
InternalServerError = 500,
|
||||||
|
NotImplemented = 501,
|
||||||
|
BadGateway = 502,
|
||||||
|
ServiceUnavailable = 503,
|
||||||
|
GatewayTimeout = 504,
|
||||||
|
HttpVersionNotSupported = 505,
|
||||||
|
VariantAlsoNegotiates = 506,
|
||||||
|
InsufficientStorage = 507,
|
||||||
|
LoopDetected = 508,
|
||||||
|
NotExtended = 510,
|
||||||
|
NetworkAuthenticationRequired = 511,
|
||||||
|
}
|
||||||
|
|
||||||
|
export type Method =
|
||||||
|
| "get"
|
||||||
|
| "GET"
|
||||||
|
| "delete"
|
||||||
|
| "DELETE"
|
||||||
|
| "head"
|
||||||
|
| "HEAD"
|
||||||
|
| "options"
|
||||||
|
| "OPTIONS"
|
||||||
|
| "post"
|
||||||
|
| "POST"
|
||||||
|
| "put"
|
||||||
|
| "PUT"
|
||||||
|
| "patch"
|
||||||
|
| "PATCH"
|
||||||
|
| "purge"
|
||||||
|
| "PURGE"
|
||||||
|
| "link"
|
||||||
|
| "LINK"
|
||||||
|
| "unlink"
|
||||||
|
| "UNLINK";
|
||||||
|
|
||||||
|
export type ResponseType =
|
||||||
|
| "arraybuffer"
|
||||||
|
| "blob"
|
||||||
|
| "document"
|
||||||
|
| "json"
|
||||||
|
| "text"
|
||||||
|
| "stream"
|
||||||
|
| "formdata";
|
||||||
|
|
||||||
|
export type responseEncoding =
|
||||||
|
| "ascii"
|
||||||
|
| "ASCII"
|
||||||
|
| "ansi"
|
||||||
|
| "ANSI"
|
||||||
|
| "binary"
|
||||||
|
| "BINARY"
|
||||||
|
| "base64"
|
||||||
|
| "BASE64"
|
||||||
|
| "base64url"
|
||||||
|
| "BASE64URL"
|
||||||
|
| "hex"
|
||||||
|
| "HEX"
|
||||||
|
| "latin1"
|
||||||
|
| "LATIN1"
|
||||||
|
| "ucs-2"
|
||||||
|
| "UCS-2"
|
||||||
|
| "ucs2"
|
||||||
|
| "UCS2"
|
||||||
|
| "utf-8"
|
||||||
|
| "UTF-8"
|
||||||
|
| "utf8"
|
||||||
|
| "UTF8"
|
||||||
|
| "utf16le"
|
||||||
|
| "UTF16LE";
|
||||||
|
|
||||||
|
export interface TransitionalOptions {
|
||||||
|
silentJSONParsing?: boolean;
|
||||||
|
forcedJSONParsing?: boolean;
|
||||||
|
clarifyTimeoutError?: boolean;
|
||||||
|
legacyInterceptorReqResOrdering?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface GenericAbortSignal {
|
||||||
|
readonly aborted: boolean;
|
||||||
|
onabort?: ((...args: any) => any) | null;
|
||||||
|
addEventListener?: (...args: any) => any;
|
||||||
|
removeEventListener?: (...args: any) => any;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface FormDataVisitorHelpers {
|
||||||
|
defaultVisitor: SerializerVisitor;
|
||||||
|
convertValue: (value: any) => any;
|
||||||
|
isVisitable: (value: any) => boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SerializerVisitor {
|
||||||
|
(
|
||||||
|
this: GenericFormData,
|
||||||
|
value: any,
|
||||||
|
key: string | number,
|
||||||
|
path: null | Array<string | number>,
|
||||||
|
helpers: FormDataVisitorHelpers,
|
||||||
|
): boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SerializerOptions {
|
||||||
|
visitor?: SerializerVisitor;
|
||||||
|
dots?: boolean;
|
||||||
|
metaTokens?: boolean;
|
||||||
|
indexes?: boolean | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// tslint:disable-next-line
|
||||||
|
export interface FormSerializerOptions extends SerializerOptions {}
|
||||||
|
|
||||||
|
export interface ParamEncoder {
|
||||||
|
(value: any, defaultEncoder: (value: any) => any): any;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CustomParamsSerializer {
|
||||||
|
(params: Record<string, any>, options?: ParamsSerializerOptions): string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ParamsSerializerOptions extends SerializerOptions {
|
||||||
|
encode?: ParamEncoder;
|
||||||
|
serialize?: CustomParamsSerializer;
|
||||||
|
}
|
||||||
|
|
||||||
|
type MaxUploadRate = number;
|
||||||
|
|
||||||
|
type MaxDownloadRate = number;
|
||||||
|
|
||||||
|
type BrowserProgressEvent = any;
|
||||||
|
|
||||||
|
export interface AxiosProgressEvent {
|
||||||
|
loaded: number;
|
||||||
|
total?: number;
|
||||||
|
progress?: number;
|
||||||
|
bytes: number;
|
||||||
|
rate?: number;
|
||||||
|
estimated?: number;
|
||||||
|
upload?: boolean;
|
||||||
|
download?: boolean;
|
||||||
|
event?: BrowserProgressEvent;
|
||||||
|
lengthComputable: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
type Milliseconds = number;
|
||||||
|
|
||||||
|
type AxiosAdapterName = StringLiteralsOrString<"xhr" | "http" | "fetch">;
|
||||||
|
|
||||||
|
type AxiosAdapterConfig = AxiosAdapter | AxiosAdapterName;
|
||||||
|
|
||||||
|
export type AddressFamily = 4 | 6 | undefined;
|
||||||
|
|
||||||
|
export interface LookupAddressEntry {
|
||||||
|
address: string;
|
||||||
|
family?: AddressFamily;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type LookupAddress = string | LookupAddressEntry;
|
||||||
|
|
||||||
|
export interface AxiosRequestConfig<D = any> {
|
||||||
|
url?: string;
|
||||||
|
method?: StringLiteralsOrString<Method>;
|
||||||
|
baseURL?: string;
|
||||||
|
allowAbsoluteUrls?: boolean;
|
||||||
|
transformRequest?: AxiosRequestTransformer | AxiosRequestTransformer[];
|
||||||
|
transformResponse?: AxiosResponseTransformer | AxiosResponseTransformer[];
|
||||||
|
headers?: (RawAxiosRequestHeaders & MethodsHeaders) | AxiosHeaders;
|
||||||
|
params?: any;
|
||||||
|
paramsSerializer?: ParamsSerializerOptions | CustomParamsSerializer;
|
||||||
|
data?: D;
|
||||||
|
timeout?: Milliseconds;
|
||||||
|
timeoutErrorMessage?: string;
|
||||||
|
withCredentials?: boolean;
|
||||||
|
adapter?: AxiosAdapterConfig | AxiosAdapterConfig[];
|
||||||
|
auth?: AxiosBasicCredentials;
|
||||||
|
responseType?: ResponseType;
|
||||||
|
responseEncoding?: StringLiteralsOrString<responseEncoding>;
|
||||||
|
xsrfCookieName?: string;
|
||||||
|
xsrfHeaderName?: string;
|
||||||
|
onUploadProgress?: (progressEvent: AxiosProgressEvent) => void;
|
||||||
|
onDownloadProgress?: (progressEvent: AxiosProgressEvent) => void;
|
||||||
|
maxContentLength?: number;
|
||||||
|
validateStatus?: ((status: number) => boolean) | null;
|
||||||
|
maxBodyLength?: number;
|
||||||
|
maxRedirects?: number;
|
||||||
|
maxRate?: number | [MaxUploadRate, MaxDownloadRate];
|
||||||
|
beforeRedirect?: (
|
||||||
|
options: Record<string, any>,
|
||||||
|
responseDetails: {
|
||||||
|
headers: Record<string, string>;
|
||||||
|
statusCode: HttpStatusCode;
|
||||||
|
},
|
||||||
|
) => void;
|
||||||
|
socketPath?: string | null;
|
||||||
|
transport?: any;
|
||||||
|
httpAgent?: any;
|
||||||
|
httpsAgent?: any;
|
||||||
|
proxy?: AxiosProxyConfig | false;
|
||||||
|
cancelToken?: CancelToken | undefined;
|
||||||
|
decompress?: boolean;
|
||||||
|
transitional?: TransitionalOptions;
|
||||||
|
signal?: GenericAbortSignal;
|
||||||
|
insecureHTTPParser?: boolean;
|
||||||
|
env?: {
|
||||||
|
FormData?: new (...args: any[]) => object;
|
||||||
|
fetch?: (
|
||||||
|
input: URL | Request | string,
|
||||||
|
init?: RequestInit,
|
||||||
|
) => Promise<Response>;
|
||||||
|
Request?: new (
|
||||||
|
input: URL | Request | string,
|
||||||
|
init?: RequestInit,
|
||||||
|
) => Request;
|
||||||
|
Response?: new (
|
||||||
|
body?:
|
||||||
|
| ArrayBuffer
|
||||||
|
| ArrayBufferView
|
||||||
|
| Blob
|
||||||
|
| FormData
|
||||||
|
| URLSearchParams
|
||||||
|
| string
|
||||||
|
| null,
|
||||||
|
init?: ResponseInit,
|
||||||
|
) => Response;
|
||||||
|
};
|
||||||
|
formSerializer?: FormSerializerOptions;
|
||||||
|
family?: AddressFamily;
|
||||||
|
lookup?:
|
||||||
|
| ((
|
||||||
|
hostname: string,
|
||||||
|
options: object,
|
||||||
|
cb: (
|
||||||
|
err: Error | null,
|
||||||
|
address: LookupAddress | LookupAddress[],
|
||||||
|
family?: AddressFamily,
|
||||||
|
) => void,
|
||||||
|
) => void)
|
||||||
|
| ((
|
||||||
|
hostname: string,
|
||||||
|
options: object,
|
||||||
|
) => Promise<
|
||||||
|
| [
|
||||||
|
address: LookupAddressEntry | LookupAddressEntry[],
|
||||||
|
family?: AddressFamily,
|
||||||
|
]
|
||||||
|
| LookupAddress
|
||||||
|
>);
|
||||||
|
withXSRFToken?:
|
||||||
|
| boolean
|
||||||
|
| ((config: InternalAxiosRequestConfig) => boolean | undefined);
|
||||||
|
parseReviver?: (this: any, key: string, value: any) => any;
|
||||||
|
fetchOptions?:
|
||||||
|
| Omit<RequestInit, "body" | "headers" | "method" | "signal">
|
||||||
|
| Record<string, any>;
|
||||||
|
httpVersion?: 1 | 2;
|
||||||
|
http2Options?: Record<string, any> & {
|
||||||
|
sessionTimeout?: number;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Alias
|
||||||
|
export type RawAxiosRequestConfig<D = any> = AxiosRequestConfig<D>;
|
||||||
|
|
||||||
|
export interface InternalAxiosRequestConfig<
|
||||||
|
D = any,
|
||||||
|
> extends AxiosRequestConfig<D> {
|
||||||
|
headers: AxiosRequestHeaders;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface HeadersDefaults {
|
||||||
|
common: RawAxiosRequestHeaders;
|
||||||
|
delete: RawAxiosRequestHeaders;
|
||||||
|
get: RawAxiosRequestHeaders;
|
||||||
|
head: RawAxiosRequestHeaders;
|
||||||
|
post: RawAxiosRequestHeaders;
|
||||||
|
put: RawAxiosRequestHeaders;
|
||||||
|
patch: RawAxiosRequestHeaders;
|
||||||
|
options?: RawAxiosRequestHeaders;
|
||||||
|
purge?: RawAxiosRequestHeaders;
|
||||||
|
link?: RawAxiosRequestHeaders;
|
||||||
|
unlink?: RawAxiosRequestHeaders;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AxiosDefaults<D = any> extends Omit<
|
||||||
|
AxiosRequestConfig<D>,
|
||||||
|
"headers"
|
||||||
|
> {
|
||||||
|
headers: HeadersDefaults;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CreateAxiosDefaults<D = any> extends Omit<
|
||||||
|
AxiosRequestConfig<D>,
|
||||||
|
"headers"
|
||||||
|
> {
|
||||||
|
headers?: RawAxiosRequestHeaders | AxiosHeaders | Partial<HeadersDefaults>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AxiosResponse<T = any, D = any, H = {}> {
|
||||||
|
data: T;
|
||||||
|
status: number;
|
||||||
|
statusText: string;
|
||||||
|
headers: (H & RawAxiosResponseHeaders) | AxiosResponseHeaders;
|
||||||
|
config: InternalAxiosRequestConfig<D>;
|
||||||
|
request?: any;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class AxiosError<T = unknown, D = any> extends Error {
|
||||||
|
constructor(
|
||||||
|
message?: string,
|
||||||
|
code?: string,
|
||||||
|
config?: InternalAxiosRequestConfig<D>,
|
||||||
|
request?: any,
|
||||||
|
response?: AxiosResponse<T, D>,
|
||||||
|
);
|
||||||
|
|
||||||
|
config?: InternalAxiosRequestConfig<D>;
|
||||||
|
code?: string;
|
||||||
|
request?: any;
|
||||||
|
response?: AxiosResponse<T, D>;
|
||||||
|
isAxiosError: boolean;
|
||||||
|
status?: number;
|
||||||
|
toJSON: () => object;
|
||||||
|
cause?: Error;
|
||||||
|
event?: BrowserProgressEvent;
|
||||||
|
static from<T = unknown, D = any>(
|
||||||
|
error: Error | unknown,
|
||||||
|
code?: string,
|
||||||
|
config?: InternalAxiosRequestConfig<D>,
|
||||||
|
request?: any,
|
||||||
|
response?: AxiosResponse<T, D>,
|
||||||
|
customProps?: object,
|
||||||
|
): AxiosError<T, D>;
|
||||||
|
static readonly ERR_FR_TOO_MANY_REDIRECTS = "ERR_FR_TOO_MANY_REDIRECTS";
|
||||||
|
static readonly ERR_BAD_OPTION_VALUE = "ERR_BAD_OPTION_VALUE";
|
||||||
|
static readonly ERR_BAD_OPTION = "ERR_BAD_OPTION";
|
||||||
|
static readonly ERR_NETWORK = "ERR_NETWORK";
|
||||||
|
static readonly ERR_DEPRECATED = "ERR_DEPRECATED";
|
||||||
|
static readonly ERR_BAD_RESPONSE = "ERR_BAD_RESPONSE";
|
||||||
|
static readonly ERR_BAD_REQUEST = "ERR_BAD_REQUEST";
|
||||||
|
static readonly ERR_NOT_SUPPORT = "ERR_NOT_SUPPORT";
|
||||||
|
static readonly ERR_INVALID_URL = "ERR_INVALID_URL";
|
||||||
|
static readonly ERR_CANCELED = "ERR_CANCELED";
|
||||||
|
static readonly ECONNABORTED = "ECONNABORTED";
|
||||||
|
static readonly ETIMEDOUT = "ETIMEDOUT";
|
||||||
|
}
|
||||||
|
|
||||||
|
export class CanceledError<T> extends AxiosError<T> {
|
||||||
|
readonly name: "CanceledError";
|
||||||
|
}
|
||||||
|
|
||||||
|
export type AxiosPromise<T = any> = Promise<AxiosResponse<T>>;
|
||||||
|
|
||||||
|
export interface CancelStatic {
|
||||||
|
new (message?: string): Cancel;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Cancel {
|
||||||
|
message: string | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Canceler {
|
||||||
|
(message?: string, config?: AxiosRequestConfig, request?: any): void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CancelTokenStatic {
|
||||||
|
new (executor: (cancel: Canceler) => void): CancelToken;
|
||||||
|
source(): CancelTokenSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CancelToken {
|
||||||
|
promise: Promise<Cancel>;
|
||||||
|
reason?: Cancel;
|
||||||
|
throwIfRequested(): void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CancelTokenSource {
|
||||||
|
token: CancelToken;
|
||||||
|
cancel: Canceler;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AxiosInterceptorOptions {
|
||||||
|
synchronous?: boolean;
|
||||||
|
runWhen?: (config: InternalAxiosRequestConfig) => boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
type AxiosInterceptorFulfilled<T> = (value: T) => T | Promise<T>;
|
||||||
|
type AxiosInterceptorRejected = (error: any) => any;
|
||||||
|
|
||||||
|
type AxiosRequestInterceptorUse<T> = (
|
||||||
|
onFulfilled?: AxiosInterceptorFulfilled<T> | null,
|
||||||
|
onRejected?: AxiosInterceptorRejected | null,
|
||||||
|
options?: AxiosInterceptorOptions,
|
||||||
|
) => number;
|
||||||
|
|
||||||
|
type AxiosResponseInterceptorUse<T> = (
|
||||||
|
onFulfilled?: AxiosInterceptorFulfilled<T> | null,
|
||||||
|
onRejected?: AxiosInterceptorRejected | null,
|
||||||
|
) => number;
|
||||||
|
|
||||||
|
interface AxiosInterceptorHandler<T> {
|
||||||
|
fulfilled: AxiosInterceptorFulfilled<T>;
|
||||||
|
rejected?: AxiosInterceptorRejected;
|
||||||
|
synchronous: boolean;
|
||||||
|
runWhen: (config: AxiosRequestConfig) => boolean | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AxiosInterceptorManager<V> {
|
||||||
|
use: V extends AxiosResponse
|
||||||
|
? AxiosResponseInterceptorUse<V>
|
||||||
|
: AxiosRequestInterceptorUse<V>;
|
||||||
|
eject(id: number): void;
|
||||||
|
clear(): void;
|
||||||
|
handlers?: Array<AxiosInterceptorHandler<V>>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Axios {
|
||||||
|
constructor(config?: AxiosRequestConfig);
|
||||||
|
defaults: AxiosDefaults;
|
||||||
|
interceptors: {
|
||||||
|
request: AxiosInterceptorManager<InternalAxiosRequestConfig>;
|
||||||
|
response: AxiosInterceptorManager<AxiosResponse>;
|
||||||
|
};
|
||||||
|
getUri(config?: AxiosRequestConfig): string;
|
||||||
|
request<T = any, R = AxiosResponse<T>, D = any>(
|
||||||
|
config: AxiosRequestConfig<D>,
|
||||||
|
): Promise<R>;
|
||||||
|
get<T = any, R = AxiosResponse<T>, D = any>(
|
||||||
|
url: string,
|
||||||
|
config?: AxiosRequestConfig<D>,
|
||||||
|
): Promise<R>;
|
||||||
|
delete<T = any, R = AxiosResponse<T>, D = any>(
|
||||||
|
url: string,
|
||||||
|
config?: AxiosRequestConfig<D>,
|
||||||
|
): Promise<R>;
|
||||||
|
head<T = any, R = AxiosResponse<T>, D = any>(
|
||||||
|
url: string,
|
||||||
|
config?: AxiosRequestConfig<D>,
|
||||||
|
): Promise<R>;
|
||||||
|
options<T = any, R = AxiosResponse<T>, D = any>(
|
||||||
|
url: string,
|
||||||
|
config?: AxiosRequestConfig<D>,
|
||||||
|
): Promise<R>;
|
||||||
|
post<T = any, R = AxiosResponse<T>, D = any>(
|
||||||
|
url: string,
|
||||||
|
data?: D,
|
||||||
|
config?: AxiosRequestConfig<D>,
|
||||||
|
): Promise<R>;
|
||||||
|
put<T = any, R = AxiosResponse<T>, D = any>(
|
||||||
|
url: string,
|
||||||
|
data?: D,
|
||||||
|
config?: AxiosRequestConfig<D>,
|
||||||
|
): Promise<R>;
|
||||||
|
patch<T = any, R = AxiosResponse<T>, D = any>(
|
||||||
|
url: string,
|
||||||
|
data?: D,
|
||||||
|
config?: AxiosRequestConfig<D>,
|
||||||
|
): Promise<R>;
|
||||||
|
postForm<T = any, R = AxiosResponse<T>, D = any>(
|
||||||
|
url: string,
|
||||||
|
data?: D,
|
||||||
|
config?: AxiosRequestConfig<D>,
|
||||||
|
): Promise<R>;
|
||||||
|
putForm<T = any, R = AxiosResponse<T>, D = any>(
|
||||||
|
url: string,
|
||||||
|
data?: D,
|
||||||
|
config?: AxiosRequestConfig<D>,
|
||||||
|
): Promise<R>;
|
||||||
|
patchForm<T = any, R = AxiosResponse<T>, D = any>(
|
||||||
|
url: string,
|
||||||
|
data?: D,
|
||||||
|
config?: AxiosRequestConfig<D>,
|
||||||
|
): Promise<R>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AxiosInstance extends Axios {
|
||||||
|
<T = any, R = AxiosResponse<T>, D = any>(
|
||||||
|
config: AxiosRequestConfig<D>,
|
||||||
|
): Promise<R>;
|
||||||
|
<T = any, R = AxiosResponse<T>, D = any>(
|
||||||
|
url: string,
|
||||||
|
config?: AxiosRequestConfig<D>,
|
||||||
|
): Promise<R>;
|
||||||
|
|
||||||
|
create(config?: CreateAxiosDefaults): AxiosInstance;
|
||||||
|
defaults: Omit<AxiosDefaults, "headers"> & {
|
||||||
|
headers: HeadersDefaults & {
|
||||||
|
[key: string]: AxiosHeaderValue;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface GenericFormData {
|
||||||
|
append(name: string, value: any, options?: any): any;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface GenericHTMLFormElement {
|
||||||
|
name: string;
|
||||||
|
method: string;
|
||||||
|
submit(): void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getAdapter(
|
||||||
|
adapters: AxiosAdapterConfig | AxiosAdapterConfig[] | undefined,
|
||||||
|
): AxiosAdapter;
|
||||||
|
|
||||||
|
export function toFormData(
|
||||||
|
sourceObj: object,
|
||||||
|
targetFormData?: GenericFormData,
|
||||||
|
options?: FormSerializerOptions,
|
||||||
|
): GenericFormData;
|
||||||
|
|
||||||
|
export function formToJSON(
|
||||||
|
form: GenericFormData | GenericHTMLFormElement,
|
||||||
|
): object;
|
||||||
|
|
||||||
|
export function isAxiosError<T = any, D = any>(
|
||||||
|
payload: any,
|
||||||
|
): payload is AxiosError<T, D>;
|
||||||
|
|
||||||
|
export function spread<T, R>(callback: (...args: T[]) => R): (array: T[]) => R;
|
||||||
|
|
||||||
|
export function isCancel<T = any>(value: any): value is CanceledError<T>;
|
||||||
|
|
||||||
|
export function all<T>(values: Array<T | Promise<T>>): Promise<T[]>;
|
||||||
|
|
||||||
|
export function mergeConfig<D = any>(
|
||||||
|
config1: AxiosRequestConfig<D>,
|
||||||
|
config2: AxiosRequestConfig<D>,
|
||||||
|
): AxiosRequestConfig<D>;
|
||||||
|
|
||||||
|
export interface AxiosStatic extends AxiosInstance {
|
||||||
|
Cancel: CancelStatic;
|
||||||
|
CancelToken: CancelTokenStatic;
|
||||||
|
Axios: typeof Axios;
|
||||||
|
AxiosError: typeof AxiosError;
|
||||||
|
HttpStatusCode: typeof HttpStatusCode;
|
||||||
|
readonly VERSION: string;
|
||||||
|
isCancel: typeof isCancel;
|
||||||
|
all: typeof all;
|
||||||
|
spread: typeof spread;
|
||||||
|
isAxiosError: typeof isAxiosError;
|
||||||
|
toFormData: typeof toFormData;
|
||||||
|
formToJSON: typeof formToJSON;
|
||||||
|
getAdapter: typeof getAdapter;
|
||||||
|
CanceledError: typeof CanceledError;
|
||||||
|
AxiosHeaders: typeof AxiosHeaders;
|
||||||
|
mergeConfig: typeof mergeConfig;
|
||||||
|
}
|
||||||
|
|
||||||
|
declare const axios: AxiosStatic;
|
||||||
|
|
||||||
|
export default axios;
|
||||||
43
server/node_modules/axios/index.js
generated
vendored
Normal file
43
server/node_modules/axios/index.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
import axios from './lib/axios.js';
|
||||||
|
|
||||||
|
// This module is intended to unwrap Axios default export as named.
|
||||||
|
// Keep top-level export same with static properties
|
||||||
|
// so that it can keep same with es module or cjs
|
||||||
|
const {
|
||||||
|
Axios,
|
||||||
|
AxiosError,
|
||||||
|
CanceledError,
|
||||||
|
isCancel,
|
||||||
|
CancelToken,
|
||||||
|
VERSION,
|
||||||
|
all,
|
||||||
|
Cancel,
|
||||||
|
isAxiosError,
|
||||||
|
spread,
|
||||||
|
toFormData,
|
||||||
|
AxiosHeaders,
|
||||||
|
HttpStatusCode,
|
||||||
|
formToJSON,
|
||||||
|
getAdapter,
|
||||||
|
mergeConfig
|
||||||
|
} = axios;
|
||||||
|
|
||||||
|
export {
|
||||||
|
axios as default,
|
||||||
|
Axios,
|
||||||
|
AxiosError,
|
||||||
|
CanceledError,
|
||||||
|
isCancel,
|
||||||
|
CancelToken,
|
||||||
|
VERSION,
|
||||||
|
all,
|
||||||
|
Cancel,
|
||||||
|
isAxiosError,
|
||||||
|
spread,
|
||||||
|
toFormData,
|
||||||
|
AxiosHeaders,
|
||||||
|
HttpStatusCode,
|
||||||
|
formToJSON,
|
||||||
|
getAdapter,
|
||||||
|
mergeConfig
|
||||||
|
}
|
||||||
37
server/node_modules/axios/lib/adapters/README.md
generated
vendored
Normal file
37
server/node_modules/axios/lib/adapters/README.md
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
# axios // adapters
|
||||||
|
|
||||||
|
The modules under `adapters/` are modules that handle dispatching a request and settling a returned `Promise` once a response is received.
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
```js
|
||||||
|
var settle = require('../core/settle');
|
||||||
|
|
||||||
|
module.exports = function myAdapter(config) {
|
||||||
|
// At this point:
|
||||||
|
// - config has been merged with defaults
|
||||||
|
// - request transformers have already run
|
||||||
|
// - request interceptors have already run
|
||||||
|
|
||||||
|
// Make the request using config provided
|
||||||
|
// Upon response settle the Promise
|
||||||
|
|
||||||
|
return new Promise(function(resolve, reject) {
|
||||||
|
|
||||||
|
var response = {
|
||||||
|
data: responseData,
|
||||||
|
status: request.status,
|
||||||
|
statusText: request.statusText,
|
||||||
|
headers: responseHeaders,
|
||||||
|
config: config,
|
||||||
|
request: request
|
||||||
|
};
|
||||||
|
|
||||||
|
settle(resolve, reject, response);
|
||||||
|
|
||||||
|
// From here:
|
||||||
|
// - response transformers will run
|
||||||
|
// - response interceptors will run
|
||||||
|
});
|
||||||
|
}
|
||||||
|
```
|
||||||
126
server/node_modules/axios/lib/adapters/adapters.js
generated
vendored
Normal file
126
server/node_modules/axios/lib/adapters/adapters.js
generated
vendored
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
import utils from '../utils.js';
|
||||||
|
import httpAdapter from './http.js';
|
||||||
|
import xhrAdapter from './xhr.js';
|
||||||
|
import * as fetchAdapter from './fetch.js';
|
||||||
|
import AxiosError from "../core/AxiosError.js";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Known adapters mapping.
|
||||||
|
* Provides environment-specific adapters for Axios:
|
||||||
|
* - `http` for Node.js
|
||||||
|
* - `xhr` for browsers
|
||||||
|
* - `fetch` for fetch API-based requests
|
||||||
|
*
|
||||||
|
* @type {Object<string, Function|Object>}
|
||||||
|
*/
|
||||||
|
const knownAdapters = {
|
||||||
|
http: httpAdapter,
|
||||||
|
xhr: xhrAdapter,
|
||||||
|
fetch: {
|
||||||
|
get: fetchAdapter.getFetch,
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Assign adapter names for easier debugging and identification
|
||||||
|
utils.forEach(knownAdapters, (fn, value) => {
|
||||||
|
if (fn) {
|
||||||
|
try {
|
||||||
|
Object.defineProperty(fn, 'name', { value });
|
||||||
|
} catch (e) {
|
||||||
|
// eslint-disable-next-line no-empty
|
||||||
|
}
|
||||||
|
Object.defineProperty(fn, 'adapterName', { value });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render a rejection reason string for unknown or unsupported adapters
|
||||||
|
*
|
||||||
|
* @param {string} reason
|
||||||
|
* @returns {string}
|
||||||
|
*/
|
||||||
|
const renderReason = (reason) => `- ${reason}`;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the adapter is resolved (function, null, or false)
|
||||||
|
*
|
||||||
|
* @param {Function|null|false} adapter
|
||||||
|
* @returns {boolean}
|
||||||
|
*/
|
||||||
|
const isResolvedHandle = (adapter) => utils.isFunction(adapter) || adapter === null || adapter === false;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the first suitable adapter from the provided list.
|
||||||
|
* Tries each adapter in order until a supported one is found.
|
||||||
|
* Throws an AxiosError if no adapter is suitable.
|
||||||
|
*
|
||||||
|
* @param {Array<string|Function>|string|Function} adapters - Adapter(s) by name or function.
|
||||||
|
* @param {Object} config - Axios request configuration
|
||||||
|
* @throws {AxiosError} If no suitable adapter is available
|
||||||
|
* @returns {Function} The resolved adapter function
|
||||||
|
*/
|
||||||
|
function getAdapter(adapters, config) {
|
||||||
|
adapters = utils.isArray(adapters) ? adapters : [adapters];
|
||||||
|
|
||||||
|
const { length } = adapters;
|
||||||
|
let nameOrAdapter;
|
||||||
|
let adapter;
|
||||||
|
|
||||||
|
const rejectedReasons = {};
|
||||||
|
|
||||||
|
for (let i = 0; i < length; i++) {
|
||||||
|
nameOrAdapter = adapters[i];
|
||||||
|
let id;
|
||||||
|
|
||||||
|
adapter = nameOrAdapter;
|
||||||
|
|
||||||
|
if (!isResolvedHandle(nameOrAdapter)) {
|
||||||
|
adapter = knownAdapters[(id = String(nameOrAdapter)).toLowerCase()];
|
||||||
|
|
||||||
|
if (adapter === undefined) {
|
||||||
|
throw new AxiosError(`Unknown adapter '${id}'`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (adapter && (utils.isFunction(adapter) || (adapter = adapter.get(config)))) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
rejectedReasons[id || '#' + i] = adapter;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!adapter) {
|
||||||
|
const reasons = Object.entries(rejectedReasons)
|
||||||
|
.map(([id, state]) => `adapter ${id} ` +
|
||||||
|
(state === false ? 'is not supported by the environment' : 'is not available in the build')
|
||||||
|
);
|
||||||
|
|
||||||
|
let s = length ?
|
||||||
|
(reasons.length > 1 ? 'since :\n' + reasons.map(renderReason).join('\n') : ' ' + renderReason(reasons[0])) :
|
||||||
|
'as no adapter specified';
|
||||||
|
|
||||||
|
throw new AxiosError(
|
||||||
|
`There is no suitable adapter to dispatch the request ` + s,
|
||||||
|
'ERR_NOT_SUPPORT'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return adapter;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Exports Axios adapters and utility to resolve an adapter
|
||||||
|
*/
|
||||||
|
export default {
|
||||||
|
/**
|
||||||
|
* Resolve an adapter from a list of adapter names or functions.
|
||||||
|
* @type {Function}
|
||||||
|
*/
|
||||||
|
getAdapter,
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Exposes all known adapters
|
||||||
|
* @type {Object<string, Function|Object>}
|
||||||
|
*/
|
||||||
|
adapters: knownAdapters
|
||||||
|
};
|
||||||
288
server/node_modules/axios/lib/adapters/fetch.js
generated
vendored
Normal file
288
server/node_modules/axios/lib/adapters/fetch.js
generated
vendored
Normal file
@@ -0,0 +1,288 @@
|
|||||||
|
import platform from "../platform/index.js";
|
||||||
|
import utils from "../utils.js";
|
||||||
|
import AxiosError from "../core/AxiosError.js";
|
||||||
|
import composeSignals from "../helpers/composeSignals.js";
|
||||||
|
import {trackStream} from "../helpers/trackStream.js";
|
||||||
|
import AxiosHeaders from "../core/AxiosHeaders.js";
|
||||||
|
import {progressEventReducer, progressEventDecorator, asyncDecorator} from "../helpers/progressEventReducer.js";
|
||||||
|
import resolveConfig from "../helpers/resolveConfig.js";
|
||||||
|
import settle from "../core/settle.js";
|
||||||
|
|
||||||
|
const DEFAULT_CHUNK_SIZE = 64 * 1024;
|
||||||
|
|
||||||
|
const {isFunction} = utils;
|
||||||
|
|
||||||
|
const globalFetchAPI = (({Request, Response}) => ({
|
||||||
|
Request, Response
|
||||||
|
}))(utils.global);
|
||||||
|
|
||||||
|
const {
|
||||||
|
ReadableStream, TextEncoder
|
||||||
|
} = utils.global;
|
||||||
|
|
||||||
|
|
||||||
|
const test = (fn, ...args) => {
|
||||||
|
try {
|
||||||
|
return !!fn(...args);
|
||||||
|
} catch (e) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const factory = (env) => {
|
||||||
|
env = utils.merge.call({
|
||||||
|
skipUndefined: true
|
||||||
|
}, globalFetchAPI, env);
|
||||||
|
|
||||||
|
const {fetch: envFetch, Request, Response} = env;
|
||||||
|
const isFetchSupported = envFetch ? isFunction(envFetch) : typeof fetch === 'function';
|
||||||
|
const isRequestSupported = isFunction(Request);
|
||||||
|
const isResponseSupported = isFunction(Response);
|
||||||
|
|
||||||
|
if (!isFetchSupported) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const isReadableStreamSupported = isFetchSupported && isFunction(ReadableStream);
|
||||||
|
|
||||||
|
const encodeText = isFetchSupported && (typeof TextEncoder === 'function' ?
|
||||||
|
((encoder) => (str) => encoder.encode(str))(new TextEncoder()) :
|
||||||
|
async (str) => new Uint8Array(await new Request(str).arrayBuffer())
|
||||||
|
);
|
||||||
|
|
||||||
|
const supportsRequestStream = isRequestSupported && isReadableStreamSupported && test(() => {
|
||||||
|
let duplexAccessed = false;
|
||||||
|
|
||||||
|
const hasContentType = new Request(platform.origin, {
|
||||||
|
body: new ReadableStream(),
|
||||||
|
method: 'POST',
|
||||||
|
get duplex() {
|
||||||
|
duplexAccessed = true;
|
||||||
|
return 'half';
|
||||||
|
},
|
||||||
|
}).headers.has('Content-Type');
|
||||||
|
|
||||||
|
return duplexAccessed && !hasContentType;
|
||||||
|
});
|
||||||
|
|
||||||
|
const supportsResponseStream = isResponseSupported && isReadableStreamSupported &&
|
||||||
|
test(() => utils.isReadableStream(new Response('').body));
|
||||||
|
|
||||||
|
const resolvers = {
|
||||||
|
stream: supportsResponseStream && ((res) => res.body)
|
||||||
|
};
|
||||||
|
|
||||||
|
isFetchSupported && ((() => {
|
||||||
|
['text', 'arrayBuffer', 'blob', 'formData', 'stream'].forEach(type => {
|
||||||
|
!resolvers[type] && (resolvers[type] = (res, config) => {
|
||||||
|
let method = res && res[type];
|
||||||
|
|
||||||
|
if (method) {
|
||||||
|
return method.call(res);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new AxiosError(`Response type '${type}' is not supported`, AxiosError.ERR_NOT_SUPPORT, config);
|
||||||
|
})
|
||||||
|
});
|
||||||
|
})());
|
||||||
|
|
||||||
|
const getBodyLength = async (body) => {
|
||||||
|
if (body == null) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (utils.isBlob(body)) {
|
||||||
|
return body.size;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (utils.isSpecCompliantForm(body)) {
|
||||||
|
const _request = new Request(platform.origin, {
|
||||||
|
method: 'POST',
|
||||||
|
body,
|
||||||
|
});
|
||||||
|
return (await _request.arrayBuffer()).byteLength;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (utils.isArrayBufferView(body) || utils.isArrayBuffer(body)) {
|
||||||
|
return body.byteLength;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (utils.isURLSearchParams(body)) {
|
||||||
|
body = body + '';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (utils.isString(body)) {
|
||||||
|
return (await encodeText(body)).byteLength;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolveBodyLength = async (headers, body) => {
|
||||||
|
const length = utils.toFiniteNumber(headers.getContentLength());
|
||||||
|
|
||||||
|
return length == null ? getBodyLength(body) : length;
|
||||||
|
}
|
||||||
|
|
||||||
|
return async (config) => {
|
||||||
|
let {
|
||||||
|
url,
|
||||||
|
method,
|
||||||
|
data,
|
||||||
|
signal,
|
||||||
|
cancelToken,
|
||||||
|
timeout,
|
||||||
|
onDownloadProgress,
|
||||||
|
onUploadProgress,
|
||||||
|
responseType,
|
||||||
|
headers,
|
||||||
|
withCredentials = 'same-origin',
|
||||||
|
fetchOptions
|
||||||
|
} = resolveConfig(config);
|
||||||
|
|
||||||
|
let _fetch = envFetch || fetch;
|
||||||
|
|
||||||
|
responseType = responseType ? (responseType + '').toLowerCase() : 'text';
|
||||||
|
|
||||||
|
let composedSignal = composeSignals([signal, cancelToken && cancelToken.toAbortSignal()], timeout);
|
||||||
|
|
||||||
|
let request = null;
|
||||||
|
|
||||||
|
const unsubscribe = composedSignal && composedSignal.unsubscribe && (() => {
|
||||||
|
composedSignal.unsubscribe();
|
||||||
|
});
|
||||||
|
|
||||||
|
let requestContentLength;
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (
|
||||||
|
onUploadProgress && supportsRequestStream && method !== 'get' && method !== 'head' &&
|
||||||
|
(requestContentLength = await resolveBodyLength(headers, data)) !== 0
|
||||||
|
) {
|
||||||
|
let _request = new Request(url, {
|
||||||
|
method: 'POST',
|
||||||
|
body: data,
|
||||||
|
duplex: "half"
|
||||||
|
});
|
||||||
|
|
||||||
|
let contentTypeHeader;
|
||||||
|
|
||||||
|
if (utils.isFormData(data) && (contentTypeHeader = _request.headers.get('content-type'))) {
|
||||||
|
headers.setContentType(contentTypeHeader)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (_request.body) {
|
||||||
|
const [onProgress, flush] = progressEventDecorator(
|
||||||
|
requestContentLength,
|
||||||
|
progressEventReducer(asyncDecorator(onUploadProgress))
|
||||||
|
);
|
||||||
|
|
||||||
|
data = trackStream(_request.body, DEFAULT_CHUNK_SIZE, onProgress, flush);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!utils.isString(withCredentials)) {
|
||||||
|
withCredentials = withCredentials ? 'include' : 'omit';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cloudflare Workers throws when credentials are defined
|
||||||
|
// see https://github.com/cloudflare/workerd/issues/902
|
||||||
|
const isCredentialsSupported = isRequestSupported && "credentials" in Request.prototype;
|
||||||
|
|
||||||
|
const resolvedOptions = {
|
||||||
|
...fetchOptions,
|
||||||
|
signal: composedSignal,
|
||||||
|
method: method.toUpperCase(),
|
||||||
|
headers: headers.normalize().toJSON(),
|
||||||
|
body: data,
|
||||||
|
duplex: "half",
|
||||||
|
credentials: isCredentialsSupported ? withCredentials : undefined
|
||||||
|
};
|
||||||
|
|
||||||
|
request = isRequestSupported && new Request(url, resolvedOptions);
|
||||||
|
|
||||||
|
let response = await (isRequestSupported ? _fetch(request, fetchOptions) : _fetch(url, resolvedOptions));
|
||||||
|
|
||||||
|
const isStreamResponse = supportsResponseStream && (responseType === 'stream' || responseType === 'response');
|
||||||
|
|
||||||
|
if (supportsResponseStream && (onDownloadProgress || (isStreamResponse && unsubscribe))) {
|
||||||
|
const options = {};
|
||||||
|
|
||||||
|
['status', 'statusText', 'headers'].forEach(prop => {
|
||||||
|
options[prop] = response[prop];
|
||||||
|
});
|
||||||
|
|
||||||
|
const responseContentLength = utils.toFiniteNumber(response.headers.get('content-length'));
|
||||||
|
|
||||||
|
const [onProgress, flush] = onDownloadProgress && progressEventDecorator(
|
||||||
|
responseContentLength,
|
||||||
|
progressEventReducer(asyncDecorator(onDownloadProgress), true)
|
||||||
|
) || [];
|
||||||
|
|
||||||
|
response = new Response(
|
||||||
|
trackStream(response.body, DEFAULT_CHUNK_SIZE, onProgress, () => {
|
||||||
|
flush && flush();
|
||||||
|
unsubscribe && unsubscribe();
|
||||||
|
}),
|
||||||
|
options
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
responseType = responseType || 'text';
|
||||||
|
|
||||||
|
let responseData = await resolvers[utils.findKey(resolvers, responseType) || 'text'](response, config);
|
||||||
|
|
||||||
|
!isStreamResponse && unsubscribe && unsubscribe();
|
||||||
|
|
||||||
|
return await new Promise((resolve, reject) => {
|
||||||
|
settle(resolve, reject, {
|
||||||
|
data: responseData,
|
||||||
|
headers: AxiosHeaders.from(response.headers),
|
||||||
|
status: response.status,
|
||||||
|
statusText: response.statusText,
|
||||||
|
config,
|
||||||
|
request
|
||||||
|
})
|
||||||
|
})
|
||||||
|
} catch (err) {
|
||||||
|
unsubscribe && unsubscribe();
|
||||||
|
|
||||||
|
if (err && err.name === 'TypeError' && /Load failed|fetch/i.test(err.message)) {
|
||||||
|
throw Object.assign(
|
||||||
|
new AxiosError('Network Error', AxiosError.ERR_NETWORK, config, request, err && err.response),
|
||||||
|
{
|
||||||
|
cause: err.cause || err
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
throw AxiosError.from(err, err && err.code, config, request, err && err.response);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const seedCache = new Map();
|
||||||
|
|
||||||
|
export const getFetch = (config) => {
|
||||||
|
let env = (config && config.env) || {};
|
||||||
|
const {fetch, Request, Response} = env;
|
||||||
|
const seeds = [
|
||||||
|
Request, Response, fetch
|
||||||
|
];
|
||||||
|
|
||||||
|
let len = seeds.length, i = len,
|
||||||
|
seed, target, map = seedCache;
|
||||||
|
|
||||||
|
while (i--) {
|
||||||
|
seed = seeds[i];
|
||||||
|
target = map.get(seed);
|
||||||
|
|
||||||
|
target === undefined && map.set(seed, target = (i ? new Map() : factory(env)))
|
||||||
|
|
||||||
|
map = target;
|
||||||
|
}
|
||||||
|
|
||||||
|
return target;
|
||||||
|
};
|
||||||
|
|
||||||
|
const adapter = getFetch();
|
||||||
|
|
||||||
|
export default adapter;
|
||||||
898
server/node_modules/axios/lib/adapters/http.js
generated
vendored
Executable file
898
server/node_modules/axios/lib/adapters/http.js
generated
vendored
Executable file
@@ -0,0 +1,898 @@
|
|||||||
|
import utils from '../utils.js';
|
||||||
|
import settle from '../core/settle.js';
|
||||||
|
import buildFullPath from '../core/buildFullPath.js';
|
||||||
|
import buildURL from '../helpers/buildURL.js';
|
||||||
|
import proxyFromEnv from 'proxy-from-env';
|
||||||
|
import http from 'http';
|
||||||
|
import https from 'https';
|
||||||
|
import http2 from 'http2';
|
||||||
|
import util from 'util';
|
||||||
|
import followRedirects from 'follow-redirects';
|
||||||
|
import zlib from 'zlib';
|
||||||
|
import {VERSION} from '../env/data.js';
|
||||||
|
import transitionalDefaults from '../defaults/transitional.js';
|
||||||
|
import AxiosError from '../core/AxiosError.js';
|
||||||
|
import CanceledError from '../cancel/CanceledError.js';
|
||||||
|
import platform from '../platform/index.js';
|
||||||
|
import fromDataURI from '../helpers/fromDataURI.js';
|
||||||
|
import stream from 'stream';
|
||||||
|
import AxiosHeaders from '../core/AxiosHeaders.js';
|
||||||
|
import AxiosTransformStream from '../helpers/AxiosTransformStream.js';
|
||||||
|
import {EventEmitter} from 'events';
|
||||||
|
import formDataToStream from "../helpers/formDataToStream.js";
|
||||||
|
import readBlob from "../helpers/readBlob.js";
|
||||||
|
import ZlibHeaderTransformStream from '../helpers/ZlibHeaderTransformStream.js';
|
||||||
|
import callbackify from "../helpers/callbackify.js";
|
||||||
|
import {progressEventReducer, progressEventDecorator, asyncDecorator} from "../helpers/progressEventReducer.js";
|
||||||
|
import estimateDataURLDecodedBytes from '../helpers/estimateDataURLDecodedBytes.js';
|
||||||
|
|
||||||
|
const zlibOptions = {
|
||||||
|
flush: zlib.constants.Z_SYNC_FLUSH,
|
||||||
|
finishFlush: zlib.constants.Z_SYNC_FLUSH
|
||||||
|
};
|
||||||
|
|
||||||
|
const brotliOptions = {
|
||||||
|
flush: zlib.constants.BROTLI_OPERATION_FLUSH,
|
||||||
|
finishFlush: zlib.constants.BROTLI_OPERATION_FLUSH
|
||||||
|
}
|
||||||
|
|
||||||
|
const isBrotliSupported = utils.isFunction(zlib.createBrotliDecompress);
|
||||||
|
|
||||||
|
const {http: httpFollow, https: httpsFollow} = followRedirects;
|
||||||
|
|
||||||
|
const isHttps = /https:?/;
|
||||||
|
|
||||||
|
const supportedProtocols = platform.protocols.map(protocol => {
|
||||||
|
return protocol + ':';
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
const flushOnFinish = (stream, [throttled, flush]) => {
|
||||||
|
stream
|
||||||
|
.on('end', flush)
|
||||||
|
.on('error', flush);
|
||||||
|
|
||||||
|
return throttled;
|
||||||
|
}
|
||||||
|
|
||||||
|
class Http2Sessions {
|
||||||
|
constructor() {
|
||||||
|
this.sessions = Object.create(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
getSession(authority, options) {
|
||||||
|
options = Object.assign({
|
||||||
|
sessionTimeout: 1000
|
||||||
|
}, options);
|
||||||
|
|
||||||
|
let authoritySessions = this.sessions[authority];
|
||||||
|
|
||||||
|
if (authoritySessions) {
|
||||||
|
let len = authoritySessions.length;
|
||||||
|
|
||||||
|
for (let i = 0; i < len; i++) {
|
||||||
|
const [sessionHandle, sessionOptions] = authoritySessions[i];
|
||||||
|
if (!sessionHandle.destroyed && !sessionHandle.closed && util.isDeepStrictEqual(sessionOptions, options)) {
|
||||||
|
return sessionHandle;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const session = http2.connect(authority, options);
|
||||||
|
|
||||||
|
let removed;
|
||||||
|
|
||||||
|
const removeSession = () => {
|
||||||
|
if (removed) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
removed = true;
|
||||||
|
|
||||||
|
let entries = authoritySessions, len = entries.length, i = len;
|
||||||
|
|
||||||
|
while (i--) {
|
||||||
|
if (entries[i][0] === session) {
|
||||||
|
if (len === 1) {
|
||||||
|
delete this.sessions[authority];
|
||||||
|
} else {
|
||||||
|
entries.splice(i, 1);
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const originalRequestFn = session.request;
|
||||||
|
|
||||||
|
const {sessionTimeout} = options;
|
||||||
|
|
||||||
|
if(sessionTimeout != null) {
|
||||||
|
|
||||||
|
let timer;
|
||||||
|
let streamsCount = 0;
|
||||||
|
|
||||||
|
session.request = function () {
|
||||||
|
const stream = originalRequestFn.apply(this, arguments);
|
||||||
|
|
||||||
|
streamsCount++;
|
||||||
|
|
||||||
|
if (timer) {
|
||||||
|
clearTimeout(timer);
|
||||||
|
timer = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
stream.once('close', () => {
|
||||||
|
if (!--streamsCount) {
|
||||||
|
timer = setTimeout(() => {
|
||||||
|
timer = null;
|
||||||
|
removeSession();
|
||||||
|
}, sessionTimeout);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return stream;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
session.once('close', removeSession);
|
||||||
|
|
||||||
|
let entry = [
|
||||||
|
session,
|
||||||
|
options
|
||||||
|
];
|
||||||
|
|
||||||
|
authoritySessions ? authoritySessions.push(entry) : authoritySessions = this.sessions[authority] = [entry];
|
||||||
|
|
||||||
|
return session;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const http2Sessions = new Http2Sessions();
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If the proxy or config beforeRedirects functions are defined, call them with the options
|
||||||
|
* object.
|
||||||
|
*
|
||||||
|
* @param {Object<string, any>} options - The options object that was passed to the request.
|
||||||
|
*
|
||||||
|
* @returns {Object<string, any>}
|
||||||
|
*/
|
||||||
|
function dispatchBeforeRedirect(options, responseDetails) {
|
||||||
|
if (options.beforeRedirects.proxy) {
|
||||||
|
options.beforeRedirects.proxy(options);
|
||||||
|
}
|
||||||
|
if (options.beforeRedirects.config) {
|
||||||
|
options.beforeRedirects.config(options, responseDetails);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If the proxy or config afterRedirects functions are defined, call them with the options
|
||||||
|
*
|
||||||
|
* @param {http.ClientRequestArgs} options
|
||||||
|
* @param {AxiosProxyConfig} configProxy configuration from Axios options object
|
||||||
|
* @param {string} location
|
||||||
|
*
|
||||||
|
* @returns {http.ClientRequestArgs}
|
||||||
|
*/
|
||||||
|
function setProxy(options, configProxy, location) {
|
||||||
|
let proxy = configProxy;
|
||||||
|
if (!proxy && proxy !== false) {
|
||||||
|
const proxyUrl = proxyFromEnv.getProxyForUrl(location);
|
||||||
|
if (proxyUrl) {
|
||||||
|
proxy = new URL(proxyUrl);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (proxy) {
|
||||||
|
// Basic proxy authorization
|
||||||
|
if (proxy.username) {
|
||||||
|
proxy.auth = (proxy.username || '') + ':' + (proxy.password || '');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (proxy.auth) {
|
||||||
|
// Support proxy auth object form
|
||||||
|
const validProxyAuth = Boolean(proxy.auth.username || proxy.auth.password);
|
||||||
|
|
||||||
|
if (validProxyAuth) {
|
||||||
|
proxy.auth = (proxy.auth.username || '') + ':' + (proxy.auth.password || '');
|
||||||
|
} else if (typeof proxy.auth === 'object') {
|
||||||
|
throw new AxiosError('Invalid proxy authorization', AxiosError.ERR_BAD_OPTION, { proxy });
|
||||||
|
}
|
||||||
|
|
||||||
|
const base64 = Buffer.from(proxy.auth, 'utf8').toString('base64');
|
||||||
|
|
||||||
|
options.headers['Proxy-Authorization'] = 'Basic ' + base64;
|
||||||
|
}
|
||||||
|
|
||||||
|
options.headers.host = options.hostname + (options.port ? ':' + options.port : '');
|
||||||
|
const proxyHost = proxy.hostname || proxy.host;
|
||||||
|
options.hostname = proxyHost;
|
||||||
|
// Replace 'host' since options is not a URL object
|
||||||
|
options.host = proxyHost;
|
||||||
|
options.port = proxy.port;
|
||||||
|
options.path = location;
|
||||||
|
if (proxy.protocol) {
|
||||||
|
options.protocol = proxy.protocol.includes(':') ? proxy.protocol : `${proxy.protocol}:`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
options.beforeRedirects.proxy = function beforeRedirect(redirectOptions) {
|
||||||
|
// Configure proxy for redirected request, passing the original config proxy to apply
|
||||||
|
// the exact same logic as if the redirected request was performed by axios directly.
|
||||||
|
setProxy(redirectOptions, configProxy, redirectOptions.href);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const isHttpAdapterSupported = typeof process !== 'undefined' && utils.kindOf(process) === 'process';
|
||||||
|
|
||||||
|
// temporary hotfix
|
||||||
|
|
||||||
|
const wrapAsync = (asyncExecutor) => {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
let onDone;
|
||||||
|
let isDone;
|
||||||
|
|
||||||
|
const done = (value, isRejected) => {
|
||||||
|
if (isDone) return;
|
||||||
|
isDone = true;
|
||||||
|
onDone && onDone(value, isRejected);
|
||||||
|
}
|
||||||
|
|
||||||
|
const _resolve = (value) => {
|
||||||
|
done(value);
|
||||||
|
resolve(value);
|
||||||
|
};
|
||||||
|
|
||||||
|
const _reject = (reason) => {
|
||||||
|
done(reason, true);
|
||||||
|
reject(reason);
|
||||||
|
}
|
||||||
|
|
||||||
|
asyncExecutor(_resolve, _reject, (onDoneHandler) => (onDone = onDoneHandler)).catch(_reject);
|
||||||
|
})
|
||||||
|
};
|
||||||
|
|
||||||
|
const resolveFamily = ({address, family}) => {
|
||||||
|
if (!utils.isString(address)) {
|
||||||
|
throw TypeError('address must be a string');
|
||||||
|
}
|
||||||
|
return ({
|
||||||
|
address,
|
||||||
|
family: family || (address.indexOf('.') < 0 ? 6 : 4)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const buildAddressEntry = (address, family) => resolveFamily(utils.isObject(address) ? address : {address, family});
|
||||||
|
|
||||||
|
const http2Transport = {
|
||||||
|
request(options, cb) {
|
||||||
|
const authority = options.protocol + '//' + options.hostname + ':' + (options.port ||(options.protocol === 'https:' ? 443 : 80));
|
||||||
|
|
||||||
|
|
||||||
|
const {http2Options, headers} = options;
|
||||||
|
|
||||||
|
const session = http2Sessions.getSession(authority, http2Options);
|
||||||
|
|
||||||
|
const {
|
||||||
|
HTTP2_HEADER_SCHEME,
|
||||||
|
HTTP2_HEADER_METHOD,
|
||||||
|
HTTP2_HEADER_PATH,
|
||||||
|
HTTP2_HEADER_STATUS
|
||||||
|
} = http2.constants;
|
||||||
|
|
||||||
|
const http2Headers = {
|
||||||
|
[HTTP2_HEADER_SCHEME]: options.protocol.replace(':', ''),
|
||||||
|
[HTTP2_HEADER_METHOD]: options.method,
|
||||||
|
[HTTP2_HEADER_PATH]: options.path,
|
||||||
|
}
|
||||||
|
|
||||||
|
utils.forEach(headers, (header, name) => {
|
||||||
|
name.charAt(0) !== ':' && (http2Headers[name] = header);
|
||||||
|
});
|
||||||
|
|
||||||
|
const req = session.request(http2Headers);
|
||||||
|
|
||||||
|
req.once('response', (responseHeaders) => {
|
||||||
|
const response = req; //duplex
|
||||||
|
|
||||||
|
responseHeaders = Object.assign({}, responseHeaders);
|
||||||
|
|
||||||
|
const status = responseHeaders[HTTP2_HEADER_STATUS];
|
||||||
|
|
||||||
|
delete responseHeaders[HTTP2_HEADER_STATUS];
|
||||||
|
|
||||||
|
response.headers = responseHeaders;
|
||||||
|
|
||||||
|
response.statusCode = +status;
|
||||||
|
|
||||||
|
cb(response);
|
||||||
|
})
|
||||||
|
|
||||||
|
return req;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*eslint consistent-return:0*/
|
||||||
|
export default isHttpAdapterSupported && function httpAdapter(config) {
|
||||||
|
return wrapAsync(async function dispatchHttpRequest(resolve, reject, onDone) {
|
||||||
|
let {data, lookup, family, httpVersion = 1, http2Options} = config;
|
||||||
|
const {responseType, responseEncoding} = config;
|
||||||
|
const method = config.method.toUpperCase();
|
||||||
|
let isDone;
|
||||||
|
let rejected = false;
|
||||||
|
let req;
|
||||||
|
|
||||||
|
httpVersion = +httpVersion;
|
||||||
|
|
||||||
|
if (Number.isNaN(httpVersion)) {
|
||||||
|
throw TypeError(`Invalid protocol version: '${config.httpVersion}' is not a number`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (httpVersion !== 1 && httpVersion !== 2) {
|
||||||
|
throw TypeError(`Unsupported protocol version '${httpVersion}'`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const isHttp2 = httpVersion === 2;
|
||||||
|
|
||||||
|
if (lookup) {
|
||||||
|
const _lookup = callbackify(lookup, (value) => utils.isArray(value) ? value : [value]);
|
||||||
|
// hotfix to support opt.all option which is required for node 20.x
|
||||||
|
lookup = (hostname, opt, cb) => {
|
||||||
|
_lookup(hostname, opt, (err, arg0, arg1) => {
|
||||||
|
if (err) {
|
||||||
|
return cb(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
const addresses = utils.isArray(arg0) ? arg0.map(addr => buildAddressEntry(addr)) : [buildAddressEntry(arg0, arg1)];
|
||||||
|
|
||||||
|
opt.all ? cb(err, addresses) : cb(err, addresses[0].address, addresses[0].family);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const abortEmitter = new EventEmitter();
|
||||||
|
|
||||||
|
function abort(reason) {
|
||||||
|
try {
|
||||||
|
abortEmitter.emit('abort', !reason || reason.type ? new CanceledError(null, config, req) : reason);
|
||||||
|
} catch(err) {
|
||||||
|
console.warn('emit error', err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
abortEmitter.once('abort', reject);
|
||||||
|
|
||||||
|
const onFinished = () => {
|
||||||
|
if (config.cancelToken) {
|
||||||
|
config.cancelToken.unsubscribe(abort);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (config.signal) {
|
||||||
|
config.signal.removeEventListener('abort', abort);
|
||||||
|
}
|
||||||
|
|
||||||
|
abortEmitter.removeAllListeners();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (config.cancelToken || config.signal) {
|
||||||
|
config.cancelToken && config.cancelToken.subscribe(abort);
|
||||||
|
if (config.signal) {
|
||||||
|
config.signal.aborted ? abort() : config.signal.addEventListener('abort', abort);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onDone((response, isRejected) => {
|
||||||
|
isDone = true;
|
||||||
|
|
||||||
|
if (isRejected) {
|
||||||
|
rejected = true;
|
||||||
|
onFinished();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const {data} = response;
|
||||||
|
|
||||||
|
if (data instanceof stream.Readable || data instanceof stream.Duplex) {
|
||||||
|
const offListeners = stream.finished(data, () => {
|
||||||
|
offListeners();
|
||||||
|
onFinished();
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
onFinished();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// Parse url
|
||||||
|
const fullPath = buildFullPath(config.baseURL, config.url, config.allowAbsoluteUrls);
|
||||||
|
const parsed = new URL(fullPath, platform.hasBrowserEnv ? platform.origin : undefined);
|
||||||
|
const protocol = parsed.protocol || supportedProtocols[0];
|
||||||
|
|
||||||
|
if (protocol === 'data:') {
|
||||||
|
// Apply the same semantics as HTTP: only enforce if a finite, non-negative cap is set.
|
||||||
|
if (config.maxContentLength > -1) {
|
||||||
|
// Use the exact string passed to fromDataURI (config.url); fall back to fullPath if needed.
|
||||||
|
const dataUrl = String(config.url || fullPath || '');
|
||||||
|
const estimated = estimateDataURLDecodedBytes(dataUrl);
|
||||||
|
|
||||||
|
if (estimated > config.maxContentLength) {
|
||||||
|
return reject(new AxiosError(
|
||||||
|
'maxContentLength size of ' + config.maxContentLength + ' exceeded',
|
||||||
|
AxiosError.ERR_BAD_RESPONSE,
|
||||||
|
config
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let convertedData;
|
||||||
|
|
||||||
|
if (method !== 'GET') {
|
||||||
|
return settle(resolve, reject, {
|
||||||
|
status: 405,
|
||||||
|
statusText: 'method not allowed',
|
||||||
|
headers: {},
|
||||||
|
config
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
convertedData = fromDataURI(config.url, responseType === 'blob', {
|
||||||
|
Blob: config.env && config.env.Blob
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
throw AxiosError.from(err, AxiosError.ERR_BAD_REQUEST, config);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (responseType === 'text') {
|
||||||
|
convertedData = convertedData.toString(responseEncoding);
|
||||||
|
|
||||||
|
if (!responseEncoding || responseEncoding === 'utf8') {
|
||||||
|
convertedData = utils.stripBOM(convertedData);
|
||||||
|
}
|
||||||
|
} else if (responseType === 'stream') {
|
||||||
|
convertedData = stream.Readable.from(convertedData);
|
||||||
|
}
|
||||||
|
|
||||||
|
return settle(resolve, reject, {
|
||||||
|
data: convertedData,
|
||||||
|
status: 200,
|
||||||
|
statusText: 'OK',
|
||||||
|
headers: new AxiosHeaders(),
|
||||||
|
config
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (supportedProtocols.indexOf(protocol) === -1) {
|
||||||
|
return reject(new AxiosError(
|
||||||
|
'Unsupported protocol ' + protocol,
|
||||||
|
AxiosError.ERR_BAD_REQUEST,
|
||||||
|
config
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
const headers = AxiosHeaders.from(config.headers).normalize();
|
||||||
|
|
||||||
|
// Set User-Agent (required by some servers)
|
||||||
|
// See https://github.com/axios/axios/issues/69
|
||||||
|
// User-Agent is specified; handle case where no UA header is desired
|
||||||
|
// Only set header if it hasn't been set in config
|
||||||
|
headers.set('User-Agent', 'axios/' + VERSION, false);
|
||||||
|
|
||||||
|
const {onUploadProgress, onDownloadProgress} = config;
|
||||||
|
const maxRate = config.maxRate;
|
||||||
|
let maxUploadRate = undefined;
|
||||||
|
let maxDownloadRate = undefined;
|
||||||
|
|
||||||
|
// support for spec compliant FormData objects
|
||||||
|
if (utils.isSpecCompliantForm(data)) {
|
||||||
|
const userBoundary = headers.getContentType(/boundary=([-_\w\d]{10,70})/i);
|
||||||
|
|
||||||
|
data = formDataToStream(data, (formHeaders) => {
|
||||||
|
headers.set(formHeaders);
|
||||||
|
}, {
|
||||||
|
tag: `axios-${VERSION}-boundary`,
|
||||||
|
boundary: userBoundary && userBoundary[1] || undefined
|
||||||
|
});
|
||||||
|
// support for https://www.npmjs.com/package/form-data api
|
||||||
|
} else if (utils.isFormData(data) && utils.isFunction(data.getHeaders)) {
|
||||||
|
headers.set(data.getHeaders());
|
||||||
|
|
||||||
|
if (!headers.hasContentLength()) {
|
||||||
|
try {
|
||||||
|
const knownLength = await util.promisify(data.getLength).call(data);
|
||||||
|
Number.isFinite(knownLength) && knownLength >= 0 && headers.setContentLength(knownLength);
|
||||||
|
/*eslint no-empty:0*/
|
||||||
|
} catch (e) {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (utils.isBlob(data) || utils.isFile(data)) {
|
||||||
|
data.size && headers.setContentType(data.type || 'application/octet-stream');
|
||||||
|
headers.setContentLength(data.size || 0);
|
||||||
|
data = stream.Readable.from(readBlob(data));
|
||||||
|
} else if (data && !utils.isStream(data)) {
|
||||||
|
if (Buffer.isBuffer(data)) {
|
||||||
|
// Nothing to do...
|
||||||
|
} else if (utils.isArrayBuffer(data)) {
|
||||||
|
data = Buffer.from(new Uint8Array(data));
|
||||||
|
} else if (utils.isString(data)) {
|
||||||
|
data = Buffer.from(data, 'utf-8');
|
||||||
|
} else {
|
||||||
|
return reject(new AxiosError(
|
||||||
|
'Data after transformation must be a string, an ArrayBuffer, a Buffer, or a Stream',
|
||||||
|
AxiosError.ERR_BAD_REQUEST,
|
||||||
|
config
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add Content-Length header if data exists
|
||||||
|
headers.setContentLength(data.length, false);
|
||||||
|
|
||||||
|
if (config.maxBodyLength > -1 && data.length > config.maxBodyLength) {
|
||||||
|
return reject(new AxiosError(
|
||||||
|
'Request body larger than maxBodyLength limit',
|
||||||
|
AxiosError.ERR_BAD_REQUEST,
|
||||||
|
config
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const contentLength = utils.toFiniteNumber(headers.getContentLength());
|
||||||
|
|
||||||
|
if (utils.isArray(maxRate)) {
|
||||||
|
maxUploadRate = maxRate[0];
|
||||||
|
maxDownloadRate = maxRate[1];
|
||||||
|
} else {
|
||||||
|
maxUploadRate = maxDownloadRate = maxRate;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data && (onUploadProgress || maxUploadRate)) {
|
||||||
|
if (!utils.isStream(data)) {
|
||||||
|
data = stream.Readable.from(data, {objectMode: false});
|
||||||
|
}
|
||||||
|
|
||||||
|
data = stream.pipeline([data, new AxiosTransformStream({
|
||||||
|
maxRate: utils.toFiniteNumber(maxUploadRate)
|
||||||
|
})], utils.noop);
|
||||||
|
|
||||||
|
onUploadProgress && data.on('progress', flushOnFinish(
|
||||||
|
data,
|
||||||
|
progressEventDecorator(
|
||||||
|
contentLength,
|
||||||
|
progressEventReducer(asyncDecorator(onUploadProgress), false, 3)
|
||||||
|
)
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// HTTP basic authentication
|
||||||
|
let auth = undefined;
|
||||||
|
if (config.auth) {
|
||||||
|
const username = config.auth.username || '';
|
||||||
|
const password = config.auth.password || '';
|
||||||
|
auth = username + ':' + password;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!auth && parsed.username) {
|
||||||
|
const urlUsername = parsed.username;
|
||||||
|
const urlPassword = parsed.password;
|
||||||
|
auth = urlUsername + ':' + urlPassword;
|
||||||
|
}
|
||||||
|
|
||||||
|
auth && headers.delete('authorization');
|
||||||
|
|
||||||
|
let path;
|
||||||
|
|
||||||
|
try {
|
||||||
|
path = buildURL(
|
||||||
|
parsed.pathname + parsed.search,
|
||||||
|
config.params,
|
||||||
|
config.paramsSerializer
|
||||||
|
).replace(/^\?/, '');
|
||||||
|
} catch (err) {
|
||||||
|
const customErr = new Error(err.message);
|
||||||
|
customErr.config = config;
|
||||||
|
customErr.url = config.url;
|
||||||
|
customErr.exists = true;
|
||||||
|
return reject(customErr);
|
||||||
|
}
|
||||||
|
|
||||||
|
headers.set(
|
||||||
|
'Accept-Encoding',
|
||||||
|
'gzip, compress, deflate' + (isBrotliSupported ? ', br' : ''), false
|
||||||
|
);
|
||||||
|
|
||||||
|
const options = {
|
||||||
|
path,
|
||||||
|
method: method,
|
||||||
|
headers: headers.toJSON(),
|
||||||
|
agents: { http: config.httpAgent, https: config.httpsAgent },
|
||||||
|
auth,
|
||||||
|
protocol,
|
||||||
|
family,
|
||||||
|
beforeRedirect: dispatchBeforeRedirect,
|
||||||
|
beforeRedirects: {},
|
||||||
|
http2Options
|
||||||
|
};
|
||||||
|
|
||||||
|
// cacheable-lookup integration hotfix
|
||||||
|
!utils.isUndefined(lookup) && (options.lookup = lookup);
|
||||||
|
|
||||||
|
if (config.socketPath) {
|
||||||
|
options.socketPath = config.socketPath;
|
||||||
|
} else {
|
||||||
|
options.hostname = parsed.hostname.startsWith("[") ? parsed.hostname.slice(1, -1) : parsed.hostname;
|
||||||
|
options.port = parsed.port;
|
||||||
|
setProxy(options, config.proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path);
|
||||||
|
}
|
||||||
|
|
||||||
|
let transport;
|
||||||
|
const isHttpsRequest = isHttps.test(options.protocol);
|
||||||
|
options.agent = isHttpsRequest ? config.httpsAgent : config.httpAgent;
|
||||||
|
|
||||||
|
if (isHttp2) {
|
||||||
|
transport = http2Transport;
|
||||||
|
} else {
|
||||||
|
if (config.transport) {
|
||||||
|
transport = config.transport;
|
||||||
|
} else if (config.maxRedirects === 0) {
|
||||||
|
transport = isHttpsRequest ? https : http;
|
||||||
|
} else {
|
||||||
|
if (config.maxRedirects) {
|
||||||
|
options.maxRedirects = config.maxRedirects;
|
||||||
|
}
|
||||||
|
if (config.beforeRedirect) {
|
||||||
|
options.beforeRedirects.config = config.beforeRedirect;
|
||||||
|
}
|
||||||
|
transport = isHttpsRequest ? httpsFollow : httpFollow;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (config.maxBodyLength > -1) {
|
||||||
|
options.maxBodyLength = config.maxBodyLength;
|
||||||
|
} else {
|
||||||
|
// follow-redirects does not skip comparison, so it should always succeed for axios -1 unlimited
|
||||||
|
options.maxBodyLength = Infinity;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (config.insecureHTTPParser) {
|
||||||
|
options.insecureHTTPParser = config.insecureHTTPParser;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the request
|
||||||
|
req = transport.request(options, function handleResponse(res) {
|
||||||
|
if (req.destroyed) return;
|
||||||
|
|
||||||
|
const streams = [res];
|
||||||
|
|
||||||
|
const responseLength = utils.toFiniteNumber(res.headers['content-length']);
|
||||||
|
|
||||||
|
if (onDownloadProgress || maxDownloadRate) {
|
||||||
|
const transformStream = new AxiosTransformStream({
|
||||||
|
maxRate: utils.toFiniteNumber(maxDownloadRate)
|
||||||
|
});
|
||||||
|
|
||||||
|
onDownloadProgress && transformStream.on('progress', flushOnFinish(
|
||||||
|
transformStream,
|
||||||
|
progressEventDecorator(
|
||||||
|
responseLength,
|
||||||
|
progressEventReducer(asyncDecorator(onDownloadProgress), true, 3)
|
||||||
|
)
|
||||||
|
));
|
||||||
|
|
||||||
|
streams.push(transformStream);
|
||||||
|
}
|
||||||
|
|
||||||
|
// decompress the response body transparently if required
|
||||||
|
let responseStream = res;
|
||||||
|
|
||||||
|
// return the last request in case of redirects
|
||||||
|
const lastRequest = res.req || req;
|
||||||
|
|
||||||
|
// if decompress disabled we should not decompress
|
||||||
|
if (config.decompress !== false && res.headers['content-encoding']) {
|
||||||
|
// if no content, but headers still say that it is encoded,
|
||||||
|
// remove the header not confuse downstream operations
|
||||||
|
if (method === 'HEAD' || res.statusCode === 204) {
|
||||||
|
delete res.headers['content-encoding'];
|
||||||
|
}
|
||||||
|
|
||||||
|
switch ((res.headers['content-encoding'] || '').toLowerCase()) {
|
||||||
|
/*eslint default-case:0*/
|
||||||
|
case 'gzip':
|
||||||
|
case 'x-gzip':
|
||||||
|
case 'compress':
|
||||||
|
case 'x-compress':
|
||||||
|
// add the unzipper to the body stream processing pipeline
|
||||||
|
streams.push(zlib.createUnzip(zlibOptions));
|
||||||
|
|
||||||
|
// remove the content-encoding in order to not confuse downstream operations
|
||||||
|
delete res.headers['content-encoding'];
|
||||||
|
break;
|
||||||
|
case 'deflate':
|
||||||
|
streams.push(new ZlibHeaderTransformStream());
|
||||||
|
|
||||||
|
// add the unzipper to the body stream processing pipeline
|
||||||
|
streams.push(zlib.createUnzip(zlibOptions));
|
||||||
|
|
||||||
|
// remove the content-encoding in order to not confuse downstream operations
|
||||||
|
delete res.headers['content-encoding'];
|
||||||
|
break;
|
||||||
|
case 'br':
|
||||||
|
if (isBrotliSupported) {
|
||||||
|
streams.push(zlib.createBrotliDecompress(brotliOptions));
|
||||||
|
delete res.headers['content-encoding'];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
responseStream = streams.length > 1 ? stream.pipeline(streams, utils.noop) : streams[0];
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
const response = {
|
||||||
|
status: res.statusCode,
|
||||||
|
statusText: res.statusMessage,
|
||||||
|
headers: new AxiosHeaders(res.headers),
|
||||||
|
config,
|
||||||
|
request: lastRequest
|
||||||
|
};
|
||||||
|
|
||||||
|
if (responseType === 'stream') {
|
||||||
|
response.data = responseStream;
|
||||||
|
settle(resolve, reject, response);
|
||||||
|
} else {
|
||||||
|
const responseBuffer = [];
|
||||||
|
let totalResponseBytes = 0;
|
||||||
|
|
||||||
|
responseStream.on('data', function handleStreamData(chunk) {
|
||||||
|
responseBuffer.push(chunk);
|
||||||
|
totalResponseBytes += chunk.length;
|
||||||
|
|
||||||
|
// make sure the content length is not over the maxContentLength if specified
|
||||||
|
if (config.maxContentLength > -1 && totalResponseBytes > config.maxContentLength) {
|
||||||
|
// stream.destroy() emit aborted event before calling reject() on Node.js v16
|
||||||
|
rejected = true;
|
||||||
|
responseStream.destroy();
|
||||||
|
abort(new AxiosError('maxContentLength size of ' + config.maxContentLength + ' exceeded',
|
||||||
|
AxiosError.ERR_BAD_RESPONSE, config, lastRequest));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
responseStream.on('aborted', function handlerStreamAborted() {
|
||||||
|
if (rejected) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const err = new AxiosError(
|
||||||
|
'stream has been aborted',
|
||||||
|
AxiosError.ERR_BAD_RESPONSE,
|
||||||
|
config,
|
||||||
|
lastRequest
|
||||||
|
);
|
||||||
|
responseStream.destroy(err);
|
||||||
|
reject(err);
|
||||||
|
});
|
||||||
|
|
||||||
|
responseStream.on('error', function handleStreamError(err) {
|
||||||
|
if (req.destroyed) return;
|
||||||
|
reject(AxiosError.from(err, null, config, lastRequest));
|
||||||
|
});
|
||||||
|
|
||||||
|
responseStream.on('end', function handleStreamEnd() {
|
||||||
|
try {
|
||||||
|
let responseData = responseBuffer.length === 1 ? responseBuffer[0] : Buffer.concat(responseBuffer);
|
||||||
|
if (responseType !== 'arraybuffer') {
|
||||||
|
responseData = responseData.toString(responseEncoding);
|
||||||
|
if (!responseEncoding || responseEncoding === 'utf8') {
|
||||||
|
responseData = utils.stripBOM(responseData);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
response.data = responseData;
|
||||||
|
} catch (err) {
|
||||||
|
return reject(AxiosError.from(err, null, config, response.request, response));
|
||||||
|
}
|
||||||
|
settle(resolve, reject, response);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
abortEmitter.once('abort', err => {
|
||||||
|
if (!responseStream.destroyed) {
|
||||||
|
responseStream.emit('error', err);
|
||||||
|
responseStream.destroy();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
abortEmitter.once('abort', err => {
|
||||||
|
if (req.close) {
|
||||||
|
req.close();
|
||||||
|
} else {
|
||||||
|
req.destroy(err);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle errors
|
||||||
|
req.on('error', function handleRequestError(err) {
|
||||||
|
reject(AxiosError.from(err, null, config, req));
|
||||||
|
});
|
||||||
|
|
||||||
|
// set tcp keep alive to prevent drop connection by peer
|
||||||
|
req.on('socket', function handleRequestSocket(socket) {
|
||||||
|
// default interval of sending ack packet is 1 minute
|
||||||
|
socket.setKeepAlive(true, 1000 * 60);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle request timeout
|
||||||
|
if (config.timeout) {
|
||||||
|
// This is forcing a int timeout to avoid problems if the `req` interface doesn't handle other types.
|
||||||
|
const timeout = parseInt(config.timeout, 10);
|
||||||
|
|
||||||
|
if (Number.isNaN(timeout)) {
|
||||||
|
abort(new AxiosError(
|
||||||
|
'error trying to parse `config.timeout` to int',
|
||||||
|
AxiosError.ERR_BAD_OPTION_VALUE,
|
||||||
|
config,
|
||||||
|
req
|
||||||
|
));
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sometime, the response will be very slow, and does not respond, the connect event will be block by event loop system.
|
||||||
|
// And timer callback will be fired, and abort() will be invoked before connection, then get "socket hang up" and code ECONNRESET.
|
||||||
|
// At this time, if we have a large number of request, nodejs will hang up some socket on background. and the number will up and up.
|
||||||
|
// And then these socket which be hang up will devouring CPU little by little.
|
||||||
|
// ClientRequest.setTimeout will be fired on the specify milliseconds, and can make sure that abort() will be fired after connect.
|
||||||
|
req.setTimeout(timeout, function handleRequestTimeout() {
|
||||||
|
if (isDone) return;
|
||||||
|
let timeoutErrorMessage = config.timeout ? 'timeout of ' + config.timeout + 'ms exceeded' : 'timeout exceeded';
|
||||||
|
const transitional = config.transitional || transitionalDefaults;
|
||||||
|
if (config.timeoutErrorMessage) {
|
||||||
|
timeoutErrorMessage = config.timeoutErrorMessage;
|
||||||
|
}
|
||||||
|
abort(new AxiosError(
|
||||||
|
timeoutErrorMessage,
|
||||||
|
transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED,
|
||||||
|
config,
|
||||||
|
req
|
||||||
|
));
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// explicitly reset the socket timeout value for a possible `keep-alive` request
|
||||||
|
req.setTimeout(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Send the request
|
||||||
|
if (utils.isStream(data)) {
|
||||||
|
let ended = false;
|
||||||
|
let errored = false;
|
||||||
|
|
||||||
|
data.on('end', () => {
|
||||||
|
ended = true;
|
||||||
|
});
|
||||||
|
|
||||||
|
data.once('error', err => {
|
||||||
|
errored = true;
|
||||||
|
req.destroy(err);
|
||||||
|
});
|
||||||
|
|
||||||
|
data.on('close', () => {
|
||||||
|
if (!ended && !errored) {
|
||||||
|
abort(new CanceledError('Request stream has been aborted', config, req));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
data.pipe(req);
|
||||||
|
} else {
|
||||||
|
data && req.write(data);
|
||||||
|
req.end();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export const __setProxy = setProxy;
|
||||||
200
server/node_modules/axios/lib/adapters/xhr.js
generated
vendored
Normal file
200
server/node_modules/axios/lib/adapters/xhr.js
generated
vendored
Normal file
@@ -0,0 +1,200 @@
|
|||||||
|
import utils from '../utils.js';
|
||||||
|
import settle from '../core/settle.js';
|
||||||
|
import transitionalDefaults from '../defaults/transitional.js';
|
||||||
|
import AxiosError from '../core/AxiosError.js';
|
||||||
|
import CanceledError from '../cancel/CanceledError.js';
|
||||||
|
import parseProtocol from '../helpers/parseProtocol.js';
|
||||||
|
import platform from '../platform/index.js';
|
||||||
|
import AxiosHeaders from '../core/AxiosHeaders.js';
|
||||||
|
import {progressEventReducer} from '../helpers/progressEventReducer.js';
|
||||||
|
import resolveConfig from "../helpers/resolveConfig.js";
|
||||||
|
|
||||||
|
const isXHRAdapterSupported = typeof XMLHttpRequest !== 'undefined';
|
||||||
|
|
||||||
|
export default isXHRAdapterSupported && function (config) {
|
||||||
|
return new Promise(function dispatchXhrRequest(resolve, reject) {
|
||||||
|
const _config = resolveConfig(config);
|
||||||
|
let requestData = _config.data;
|
||||||
|
const requestHeaders = AxiosHeaders.from(_config.headers).normalize();
|
||||||
|
let {responseType, onUploadProgress, onDownloadProgress} = _config;
|
||||||
|
let onCanceled;
|
||||||
|
let uploadThrottled, downloadThrottled;
|
||||||
|
let flushUpload, flushDownload;
|
||||||
|
|
||||||
|
function done() {
|
||||||
|
flushUpload && flushUpload(); // flush events
|
||||||
|
flushDownload && flushDownload(); // flush events
|
||||||
|
|
||||||
|
_config.cancelToken && _config.cancelToken.unsubscribe(onCanceled);
|
||||||
|
|
||||||
|
_config.signal && _config.signal.removeEventListener('abort', onCanceled);
|
||||||
|
}
|
||||||
|
|
||||||
|
let request = new XMLHttpRequest();
|
||||||
|
|
||||||
|
request.open(_config.method.toUpperCase(), _config.url, true);
|
||||||
|
|
||||||
|
// Set the request timeout in MS
|
||||||
|
request.timeout = _config.timeout;
|
||||||
|
|
||||||
|
function onloadend() {
|
||||||
|
if (!request) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Prepare the response
|
||||||
|
const responseHeaders = AxiosHeaders.from(
|
||||||
|
'getAllResponseHeaders' in request && request.getAllResponseHeaders()
|
||||||
|
);
|
||||||
|
const responseData = !responseType || responseType === 'text' || responseType === 'json' ?
|
||||||
|
request.responseText : request.response;
|
||||||
|
const response = {
|
||||||
|
data: responseData,
|
||||||
|
status: request.status,
|
||||||
|
statusText: request.statusText,
|
||||||
|
headers: responseHeaders,
|
||||||
|
config,
|
||||||
|
request
|
||||||
|
};
|
||||||
|
|
||||||
|
settle(function _resolve(value) {
|
||||||
|
resolve(value);
|
||||||
|
done();
|
||||||
|
}, function _reject(err) {
|
||||||
|
reject(err);
|
||||||
|
done();
|
||||||
|
}, response);
|
||||||
|
|
||||||
|
// Clean up request
|
||||||
|
request = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if ('onloadend' in request) {
|
||||||
|
// Use onloadend if available
|
||||||
|
request.onloadend = onloadend;
|
||||||
|
} else {
|
||||||
|
// Listen for ready state to emulate onloadend
|
||||||
|
request.onreadystatechange = function handleLoad() {
|
||||||
|
if (!request || request.readyState !== 4) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// The request errored out and we didn't get a response, this will be
|
||||||
|
// handled by onerror instead
|
||||||
|
// With one exception: request that using file: protocol, most browsers
|
||||||
|
// will return status as 0 even though it's a successful request
|
||||||
|
if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// readystate handler is calling before onerror or ontimeout handlers,
|
||||||
|
// so we should call onloadend on the next 'tick'
|
||||||
|
setTimeout(onloadend);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle browser request cancellation (as opposed to a manual cancellation)
|
||||||
|
request.onabort = function handleAbort() {
|
||||||
|
if (!request) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
reject(new AxiosError('Request aborted', AxiosError.ECONNABORTED, config, request));
|
||||||
|
|
||||||
|
// Clean up request
|
||||||
|
request = null;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Handle low level network errors
|
||||||
|
request.onerror = function handleError(event) {
|
||||||
|
// Browsers deliver a ProgressEvent in XHR onerror
|
||||||
|
// (message may be empty; when present, surface it)
|
||||||
|
// See https://developer.mozilla.org/docs/Web/API/XMLHttpRequest/error_event
|
||||||
|
const msg = event && event.message ? event.message : 'Network Error';
|
||||||
|
const err = new AxiosError(msg, AxiosError.ERR_NETWORK, config, request);
|
||||||
|
// attach the underlying event for consumers who want details
|
||||||
|
err.event = event || null;
|
||||||
|
reject(err);
|
||||||
|
request = null;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Handle timeout
|
||||||
|
request.ontimeout = function handleTimeout() {
|
||||||
|
let timeoutErrorMessage = _config.timeout ? 'timeout of ' + _config.timeout + 'ms exceeded' : 'timeout exceeded';
|
||||||
|
const transitional = _config.transitional || transitionalDefaults;
|
||||||
|
if (_config.timeoutErrorMessage) {
|
||||||
|
timeoutErrorMessage = _config.timeoutErrorMessage;
|
||||||
|
}
|
||||||
|
reject(new AxiosError(
|
||||||
|
timeoutErrorMessage,
|
||||||
|
transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED,
|
||||||
|
config,
|
||||||
|
request));
|
||||||
|
|
||||||
|
// Clean up request
|
||||||
|
request = null;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Remove Content-Type if data is undefined
|
||||||
|
requestData === undefined && requestHeaders.setContentType(null);
|
||||||
|
|
||||||
|
// Add headers to the request
|
||||||
|
if ('setRequestHeader' in request) {
|
||||||
|
utils.forEach(requestHeaders.toJSON(), function setRequestHeader(val, key) {
|
||||||
|
request.setRequestHeader(key, val);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add withCredentials to request if needed
|
||||||
|
if (!utils.isUndefined(_config.withCredentials)) {
|
||||||
|
request.withCredentials = !!_config.withCredentials;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add responseType to request if needed
|
||||||
|
if (responseType && responseType !== 'json') {
|
||||||
|
request.responseType = _config.responseType;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle progress if needed
|
||||||
|
if (onDownloadProgress) {
|
||||||
|
([downloadThrottled, flushDownload] = progressEventReducer(onDownloadProgress, true));
|
||||||
|
request.addEventListener('progress', downloadThrottled);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Not all browsers support upload events
|
||||||
|
if (onUploadProgress && request.upload) {
|
||||||
|
([uploadThrottled, flushUpload] = progressEventReducer(onUploadProgress));
|
||||||
|
|
||||||
|
request.upload.addEventListener('progress', uploadThrottled);
|
||||||
|
|
||||||
|
request.upload.addEventListener('loadend', flushUpload);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (_config.cancelToken || _config.signal) {
|
||||||
|
// Handle cancellation
|
||||||
|
// eslint-disable-next-line func-names
|
||||||
|
onCanceled = cancel => {
|
||||||
|
if (!request) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
reject(!cancel || cancel.type ? new CanceledError(null, config, request) : cancel);
|
||||||
|
request.abort();
|
||||||
|
request = null;
|
||||||
|
};
|
||||||
|
|
||||||
|
_config.cancelToken && _config.cancelToken.subscribe(onCanceled);
|
||||||
|
if (_config.signal) {
|
||||||
|
_config.signal.aborted ? onCanceled() : _config.signal.addEventListener('abort', onCanceled);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const protocol = parseProtocol(_config.url);
|
||||||
|
|
||||||
|
if (protocol && platform.protocols.indexOf(protocol) === -1) {
|
||||||
|
reject(new AxiosError('Unsupported protocol ' + protocol + ':', AxiosError.ERR_BAD_REQUEST, config));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Send the request
|
||||||
|
request.send(requestData || null);
|
||||||
|
});
|
||||||
|
}
|
||||||
89
server/node_modules/axios/lib/axios.js
generated
vendored
Normal file
89
server/node_modules/axios/lib/axios.js
generated
vendored
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import utils from './utils.js';
|
||||||
|
import bind from './helpers/bind.js';
|
||||||
|
import Axios from './core/Axios.js';
|
||||||
|
import mergeConfig from './core/mergeConfig.js';
|
||||||
|
import defaults from './defaults/index.js';
|
||||||
|
import formDataToJSON from './helpers/formDataToJSON.js';
|
||||||
|
import CanceledError from './cancel/CanceledError.js';
|
||||||
|
import CancelToken from './cancel/CancelToken.js';
|
||||||
|
import isCancel from './cancel/isCancel.js';
|
||||||
|
import {VERSION} from './env/data.js';
|
||||||
|
import toFormData from './helpers/toFormData.js';
|
||||||
|
import AxiosError from './core/AxiosError.js';
|
||||||
|
import spread from './helpers/spread.js';
|
||||||
|
import isAxiosError from './helpers/isAxiosError.js';
|
||||||
|
import AxiosHeaders from "./core/AxiosHeaders.js";
|
||||||
|
import adapters from './adapters/adapters.js';
|
||||||
|
import HttpStatusCode from './helpers/HttpStatusCode.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create an instance of Axios
|
||||||
|
*
|
||||||
|
* @param {Object} defaultConfig The default config for the instance
|
||||||
|
*
|
||||||
|
* @returns {Axios} A new instance of Axios
|
||||||
|
*/
|
||||||
|
function createInstance(defaultConfig) {
|
||||||
|
const context = new Axios(defaultConfig);
|
||||||
|
const instance = bind(Axios.prototype.request, context);
|
||||||
|
|
||||||
|
// Copy axios.prototype to instance
|
||||||
|
utils.extend(instance, Axios.prototype, context, {allOwnKeys: true});
|
||||||
|
|
||||||
|
// Copy context to instance
|
||||||
|
utils.extend(instance, context, null, {allOwnKeys: true});
|
||||||
|
|
||||||
|
// Factory for creating new instances
|
||||||
|
instance.create = function create(instanceConfig) {
|
||||||
|
return createInstance(mergeConfig(defaultConfig, instanceConfig));
|
||||||
|
};
|
||||||
|
|
||||||
|
return instance;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the default instance to be exported
|
||||||
|
const axios = createInstance(defaults);
|
||||||
|
|
||||||
|
// Expose Axios class to allow class inheritance
|
||||||
|
axios.Axios = Axios;
|
||||||
|
|
||||||
|
// Expose Cancel & CancelToken
|
||||||
|
axios.CanceledError = CanceledError;
|
||||||
|
axios.CancelToken = CancelToken;
|
||||||
|
axios.isCancel = isCancel;
|
||||||
|
axios.VERSION = VERSION;
|
||||||
|
axios.toFormData = toFormData;
|
||||||
|
|
||||||
|
// Expose AxiosError class
|
||||||
|
axios.AxiosError = AxiosError;
|
||||||
|
|
||||||
|
// alias for CanceledError for backward compatibility
|
||||||
|
axios.Cancel = axios.CanceledError;
|
||||||
|
|
||||||
|
// Expose all/spread
|
||||||
|
axios.all = function all(promises) {
|
||||||
|
return Promise.all(promises);
|
||||||
|
};
|
||||||
|
|
||||||
|
axios.spread = spread;
|
||||||
|
|
||||||
|
// Expose isAxiosError
|
||||||
|
axios.isAxiosError = isAxiosError;
|
||||||
|
|
||||||
|
// Expose mergeConfig
|
||||||
|
axios.mergeConfig = mergeConfig;
|
||||||
|
|
||||||
|
axios.AxiosHeaders = AxiosHeaders;
|
||||||
|
|
||||||
|
axios.formToJSON = thing => formDataToJSON(utils.isHTMLForm(thing) ? new FormData(thing) : thing);
|
||||||
|
|
||||||
|
axios.getAdapter = adapters.getAdapter;
|
||||||
|
|
||||||
|
axios.HttpStatusCode = HttpStatusCode;
|
||||||
|
|
||||||
|
axios.default = axios;
|
||||||
|
|
||||||
|
// this module should only have a default export
|
||||||
|
export default axios
|
||||||
135
server/node_modules/axios/lib/cancel/CancelToken.js
generated
vendored
Normal file
135
server/node_modules/axios/lib/cancel/CancelToken.js
generated
vendored
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import CanceledError from './CanceledError.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A `CancelToken` is an object that can be used to request cancellation of an operation.
|
||||||
|
*
|
||||||
|
* @param {Function} executor The executor function.
|
||||||
|
*
|
||||||
|
* @returns {CancelToken}
|
||||||
|
*/
|
||||||
|
class CancelToken {
|
||||||
|
constructor(executor) {
|
||||||
|
if (typeof executor !== 'function') {
|
||||||
|
throw new TypeError('executor must be a function.');
|
||||||
|
}
|
||||||
|
|
||||||
|
let resolvePromise;
|
||||||
|
|
||||||
|
this.promise = new Promise(function promiseExecutor(resolve) {
|
||||||
|
resolvePromise = resolve;
|
||||||
|
});
|
||||||
|
|
||||||
|
const token = this;
|
||||||
|
|
||||||
|
// eslint-disable-next-line func-names
|
||||||
|
this.promise.then(cancel => {
|
||||||
|
if (!token._listeners) return;
|
||||||
|
|
||||||
|
let i = token._listeners.length;
|
||||||
|
|
||||||
|
while (i-- > 0) {
|
||||||
|
token._listeners[i](cancel);
|
||||||
|
}
|
||||||
|
token._listeners = null;
|
||||||
|
});
|
||||||
|
|
||||||
|
// eslint-disable-next-line func-names
|
||||||
|
this.promise.then = onfulfilled => {
|
||||||
|
let _resolve;
|
||||||
|
// eslint-disable-next-line func-names
|
||||||
|
const promise = new Promise(resolve => {
|
||||||
|
token.subscribe(resolve);
|
||||||
|
_resolve = resolve;
|
||||||
|
}).then(onfulfilled);
|
||||||
|
|
||||||
|
promise.cancel = function reject() {
|
||||||
|
token.unsubscribe(_resolve);
|
||||||
|
};
|
||||||
|
|
||||||
|
return promise;
|
||||||
|
};
|
||||||
|
|
||||||
|
executor(function cancel(message, config, request) {
|
||||||
|
if (token.reason) {
|
||||||
|
// Cancellation has already been requested
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
token.reason = new CanceledError(message, config, request);
|
||||||
|
resolvePromise(token.reason);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Throws a `CanceledError` if cancellation has been requested.
|
||||||
|
*/
|
||||||
|
throwIfRequested() {
|
||||||
|
if (this.reason) {
|
||||||
|
throw this.reason;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Subscribe to the cancel signal
|
||||||
|
*/
|
||||||
|
|
||||||
|
subscribe(listener) {
|
||||||
|
if (this.reason) {
|
||||||
|
listener(this.reason);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this._listeners) {
|
||||||
|
this._listeners.push(listener);
|
||||||
|
} else {
|
||||||
|
this._listeners = [listener];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Unsubscribe from the cancel signal
|
||||||
|
*/
|
||||||
|
|
||||||
|
unsubscribe(listener) {
|
||||||
|
if (!this._listeners) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const index = this._listeners.indexOf(listener);
|
||||||
|
if (index !== -1) {
|
||||||
|
this._listeners.splice(index, 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
toAbortSignal() {
|
||||||
|
const controller = new AbortController();
|
||||||
|
|
||||||
|
const abort = (err) => {
|
||||||
|
controller.abort(err);
|
||||||
|
};
|
||||||
|
|
||||||
|
this.subscribe(abort);
|
||||||
|
|
||||||
|
controller.signal.unsubscribe = () => this.unsubscribe(abort);
|
||||||
|
|
||||||
|
return controller.signal;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an object that contains a new `CancelToken` and a function that, when called,
|
||||||
|
* cancels the `CancelToken`.
|
||||||
|
*/
|
||||||
|
static source() {
|
||||||
|
let cancel;
|
||||||
|
const token = new CancelToken(function executor(c) {
|
||||||
|
cancel = c;
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
token,
|
||||||
|
cancel
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default CancelToken;
|
||||||
22
server/node_modules/axios/lib/cancel/CanceledError.js
generated
vendored
Normal file
22
server/node_modules/axios/lib/cancel/CanceledError.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import AxiosError from '../core/AxiosError.js';
|
||||||
|
|
||||||
|
class CanceledError extends AxiosError {
|
||||||
|
/**
|
||||||
|
* A `CanceledError` is an object that is thrown when an operation is canceled.
|
||||||
|
*
|
||||||
|
* @param {string=} message The message.
|
||||||
|
* @param {Object=} config The config.
|
||||||
|
* @param {Object=} request The request.
|
||||||
|
*
|
||||||
|
* @returns {CanceledError} The created error.
|
||||||
|
*/
|
||||||
|
constructor(message, config, request) {
|
||||||
|
super(message == null ? 'canceled' : message, AxiosError.ERR_CANCELED, config, request);
|
||||||
|
this.name = 'CanceledError';
|
||||||
|
this.__CANCEL__ = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default CanceledError;
|
||||||
5
server/node_modules/axios/lib/cancel/isCancel.js
generated
vendored
Normal file
5
server/node_modules/axios/lib/cancel/isCancel.js
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
export default function isCancel(value) {
|
||||||
|
return !!(value && value.__CANCEL__);
|
||||||
|
}
|
||||||
249
server/node_modules/axios/lib/core/Axios.js
generated
vendored
Normal file
249
server/node_modules/axios/lib/core/Axios.js
generated
vendored
Normal file
@@ -0,0 +1,249 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import utils from '../utils.js';
|
||||||
|
import buildURL from '../helpers/buildURL.js';
|
||||||
|
import InterceptorManager from './InterceptorManager.js';
|
||||||
|
import dispatchRequest from './dispatchRequest.js';
|
||||||
|
import mergeConfig from './mergeConfig.js';
|
||||||
|
import buildFullPath from './buildFullPath.js';
|
||||||
|
import validator from '../helpers/validator.js';
|
||||||
|
import AxiosHeaders from './AxiosHeaders.js';
|
||||||
|
import transitionalDefaults from '../defaults/transitional.js';
|
||||||
|
|
||||||
|
const validators = validator.validators;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new instance of Axios
|
||||||
|
*
|
||||||
|
* @param {Object} instanceConfig The default config for the instance
|
||||||
|
*
|
||||||
|
* @return {Axios} A new instance of Axios
|
||||||
|
*/
|
||||||
|
class Axios {
|
||||||
|
constructor(instanceConfig) {
|
||||||
|
this.defaults = instanceConfig || {};
|
||||||
|
this.interceptors = {
|
||||||
|
request: new InterceptorManager(),
|
||||||
|
response: new InterceptorManager()
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Dispatch a request
|
||||||
|
*
|
||||||
|
* @param {String|Object} configOrUrl The config specific for this request (merged with this.defaults)
|
||||||
|
* @param {?Object} config
|
||||||
|
*
|
||||||
|
* @returns {Promise} The Promise to be fulfilled
|
||||||
|
*/
|
||||||
|
async request(configOrUrl, config) {
|
||||||
|
try {
|
||||||
|
return await this._request(configOrUrl, config);
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof Error) {
|
||||||
|
let dummy = {};
|
||||||
|
|
||||||
|
Error.captureStackTrace ? Error.captureStackTrace(dummy) : (dummy = new Error());
|
||||||
|
|
||||||
|
// slice off the Error: ... line
|
||||||
|
const stack = dummy.stack ? dummy.stack.replace(/^.+\n/, '') : '';
|
||||||
|
try {
|
||||||
|
if (!err.stack) {
|
||||||
|
err.stack = stack;
|
||||||
|
// match without the 2 top stack lines
|
||||||
|
} else if (stack && !String(err.stack).endsWith(stack.replace(/^.+\n.+\n/, ''))) {
|
||||||
|
err.stack += '\n' + stack
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
// ignore the case where "stack" is an un-writable property
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_request(configOrUrl, config) {
|
||||||
|
/*eslint no-param-reassign:0*/
|
||||||
|
// Allow for axios('example/url'[, config]) a la fetch API
|
||||||
|
if (typeof configOrUrl === 'string') {
|
||||||
|
config = config || {};
|
||||||
|
config.url = configOrUrl;
|
||||||
|
} else {
|
||||||
|
config = configOrUrl || {};
|
||||||
|
}
|
||||||
|
|
||||||
|
config = mergeConfig(this.defaults, config);
|
||||||
|
|
||||||
|
const {transitional, paramsSerializer, headers} = config;
|
||||||
|
|
||||||
|
if (transitional !== undefined) {
|
||||||
|
validator.assertOptions(transitional, {
|
||||||
|
silentJSONParsing: validators.transitional(validators.boolean),
|
||||||
|
forcedJSONParsing: validators.transitional(validators.boolean),
|
||||||
|
clarifyTimeoutError: validators.transitional(validators.boolean),
|
||||||
|
legacyInterceptorReqResOrdering: validators.transitional(validators.boolean)
|
||||||
|
}, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (paramsSerializer != null) {
|
||||||
|
if (utils.isFunction(paramsSerializer)) {
|
||||||
|
config.paramsSerializer = {
|
||||||
|
serialize: paramsSerializer
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
validator.assertOptions(paramsSerializer, {
|
||||||
|
encode: validators.function,
|
||||||
|
serialize: validators.function
|
||||||
|
}, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set config.allowAbsoluteUrls
|
||||||
|
if (config.allowAbsoluteUrls !== undefined) {
|
||||||
|
// do nothing
|
||||||
|
} else if (this.defaults.allowAbsoluteUrls !== undefined) {
|
||||||
|
config.allowAbsoluteUrls = this.defaults.allowAbsoluteUrls;
|
||||||
|
} else {
|
||||||
|
config.allowAbsoluteUrls = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
validator.assertOptions(config, {
|
||||||
|
baseUrl: validators.spelling('baseURL'),
|
||||||
|
withXsrfToken: validators.spelling('withXSRFToken')
|
||||||
|
}, true);
|
||||||
|
|
||||||
|
// Set config.method
|
||||||
|
config.method = (config.method || this.defaults.method || 'get').toLowerCase();
|
||||||
|
|
||||||
|
// Flatten headers
|
||||||
|
let contextHeaders = headers && utils.merge(
|
||||||
|
headers.common,
|
||||||
|
headers[config.method]
|
||||||
|
);
|
||||||
|
|
||||||
|
headers && utils.forEach(
|
||||||
|
['delete', 'get', 'head', 'post', 'put', 'patch', 'common'],
|
||||||
|
(method) => {
|
||||||
|
delete headers[method];
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
config.headers = AxiosHeaders.concat(contextHeaders, headers);
|
||||||
|
|
||||||
|
// filter out skipped interceptors
|
||||||
|
const requestInterceptorChain = [];
|
||||||
|
let synchronousRequestInterceptors = true;
|
||||||
|
this.interceptors.request.forEach(function unshiftRequestInterceptors(interceptor) {
|
||||||
|
if (typeof interceptor.runWhen === 'function' && interceptor.runWhen(config) === false) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
synchronousRequestInterceptors = synchronousRequestInterceptors && interceptor.synchronous;
|
||||||
|
|
||||||
|
const transitional = config.transitional || transitionalDefaults;
|
||||||
|
const legacyInterceptorReqResOrdering = transitional && transitional.legacyInterceptorReqResOrdering;
|
||||||
|
|
||||||
|
if (legacyInterceptorReqResOrdering) {
|
||||||
|
requestInterceptorChain.unshift(interceptor.fulfilled, interceptor.rejected);
|
||||||
|
} else {
|
||||||
|
requestInterceptorChain.push(interceptor.fulfilled, interceptor.rejected);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
const responseInterceptorChain = [];
|
||||||
|
this.interceptors.response.forEach(function pushResponseInterceptors(interceptor) {
|
||||||
|
responseInterceptorChain.push(interceptor.fulfilled, interceptor.rejected);
|
||||||
|
});
|
||||||
|
|
||||||
|
let promise;
|
||||||
|
let i = 0;
|
||||||
|
let len;
|
||||||
|
|
||||||
|
if (!synchronousRequestInterceptors) {
|
||||||
|
const chain = [dispatchRequest.bind(this), undefined];
|
||||||
|
chain.unshift(...requestInterceptorChain);
|
||||||
|
chain.push(...responseInterceptorChain);
|
||||||
|
len = chain.length;
|
||||||
|
|
||||||
|
promise = Promise.resolve(config);
|
||||||
|
|
||||||
|
while (i < len) {
|
||||||
|
promise = promise.then(chain[i++], chain[i++]);
|
||||||
|
}
|
||||||
|
|
||||||
|
return promise;
|
||||||
|
}
|
||||||
|
|
||||||
|
len = requestInterceptorChain.length;
|
||||||
|
|
||||||
|
let newConfig = config;
|
||||||
|
|
||||||
|
while (i < len) {
|
||||||
|
const onFulfilled = requestInterceptorChain[i++];
|
||||||
|
const onRejected = requestInterceptorChain[i++];
|
||||||
|
try {
|
||||||
|
newConfig = onFulfilled(newConfig);
|
||||||
|
} catch (error) {
|
||||||
|
onRejected.call(this, error);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
promise = dispatchRequest.call(this, newConfig);
|
||||||
|
} catch (error) {
|
||||||
|
return Promise.reject(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
i = 0;
|
||||||
|
len = responseInterceptorChain.length;
|
||||||
|
|
||||||
|
while (i < len) {
|
||||||
|
promise = promise.then(responseInterceptorChain[i++], responseInterceptorChain[i++]);
|
||||||
|
}
|
||||||
|
|
||||||
|
return promise;
|
||||||
|
}
|
||||||
|
|
||||||
|
getUri(config) {
|
||||||
|
config = mergeConfig(this.defaults, config);
|
||||||
|
const fullPath = buildFullPath(config.baseURL, config.url, config.allowAbsoluteUrls);
|
||||||
|
return buildURL(fullPath, config.params, config.paramsSerializer);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Provide aliases for supported request methods
|
||||||
|
utils.forEach(['delete', 'get', 'head', 'options'], function forEachMethodNoData(method) {
|
||||||
|
/*eslint func-names:0*/
|
||||||
|
Axios.prototype[method] = function(url, config) {
|
||||||
|
return this.request(mergeConfig(config || {}, {
|
||||||
|
method,
|
||||||
|
url,
|
||||||
|
data: (config || {}).data
|
||||||
|
}));
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) {
|
||||||
|
/*eslint func-names:0*/
|
||||||
|
|
||||||
|
function generateHTTPMethod(isForm) {
|
||||||
|
return function httpMethod(url, data, config) {
|
||||||
|
return this.request(mergeConfig(config || {}, {
|
||||||
|
method,
|
||||||
|
headers: isForm ? {
|
||||||
|
'Content-Type': 'multipart/form-data'
|
||||||
|
} : {},
|
||||||
|
url,
|
||||||
|
data
|
||||||
|
}));
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
Axios.prototype[method] = generateHTTPMethod();
|
||||||
|
|
||||||
|
Axios.prototype[method + 'Form'] = generateHTTPMethod(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
export default Axios;
|
||||||
73
server/node_modules/axios/lib/core/AxiosError.js
generated
vendored
Normal file
73
server/node_modules/axios/lib/core/AxiosError.js
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import utils from '../utils.js';
|
||||||
|
|
||||||
|
class AxiosError extends Error {
|
||||||
|
static from(error, code, config, request, response, customProps) {
|
||||||
|
const axiosError = new AxiosError(error.message, code || error.code, config, request, response);
|
||||||
|
axiosError.cause = error;
|
||||||
|
axiosError.name = error.name;
|
||||||
|
customProps && Object.assign(axiosError, customProps);
|
||||||
|
return axiosError;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create an Error with the specified message, config, error code, request and response.
|
||||||
|
*
|
||||||
|
* @param {string} message The error message.
|
||||||
|
* @param {string} [code] The error code (for example, 'ECONNABORTED').
|
||||||
|
* @param {Object} [config] The config.
|
||||||
|
* @param {Object} [request] The request.
|
||||||
|
* @param {Object} [response] The response.
|
||||||
|
*
|
||||||
|
* @returns {Error} The created error.
|
||||||
|
*/
|
||||||
|
constructor(message, code, config, request, response) {
|
||||||
|
super(message);
|
||||||
|
this.name = 'AxiosError';
|
||||||
|
this.isAxiosError = true;
|
||||||
|
code && (this.code = code);
|
||||||
|
config && (this.config = config);
|
||||||
|
request && (this.request = request);
|
||||||
|
if (response) {
|
||||||
|
this.response = response;
|
||||||
|
this.status = response.status;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
toJSON() {
|
||||||
|
return {
|
||||||
|
// Standard
|
||||||
|
message: this.message,
|
||||||
|
name: this.name,
|
||||||
|
// Microsoft
|
||||||
|
description: this.description,
|
||||||
|
number: this.number,
|
||||||
|
// Mozilla
|
||||||
|
fileName: this.fileName,
|
||||||
|
lineNumber: this.lineNumber,
|
||||||
|
columnNumber: this.columnNumber,
|
||||||
|
stack: this.stack,
|
||||||
|
// Axios
|
||||||
|
config: utils.toJSONObject(this.config),
|
||||||
|
code: this.code,
|
||||||
|
status: this.status,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// This can be changed to static properties as soon as the parser options in .eslint.cjs are updated.
|
||||||
|
AxiosError.ERR_BAD_OPTION_VALUE = 'ERR_BAD_OPTION_VALUE';
|
||||||
|
AxiosError.ERR_BAD_OPTION = 'ERR_BAD_OPTION';
|
||||||
|
AxiosError.ECONNABORTED = 'ECONNABORTED';
|
||||||
|
AxiosError.ETIMEDOUT = 'ETIMEDOUT';
|
||||||
|
AxiosError.ERR_NETWORK = 'ERR_NETWORK';
|
||||||
|
AxiosError.ERR_FR_TOO_MANY_REDIRECTS = 'ERR_FR_TOO_MANY_REDIRECTS';
|
||||||
|
AxiosError.ERR_DEPRECATED = 'ERR_DEPRECATED';
|
||||||
|
AxiosError.ERR_BAD_RESPONSE = 'ERR_BAD_RESPONSE';
|
||||||
|
AxiosError.ERR_BAD_REQUEST = 'ERR_BAD_REQUEST';
|
||||||
|
AxiosError.ERR_CANCELED = 'ERR_CANCELED';
|
||||||
|
AxiosError.ERR_NOT_SUPPORT = 'ERR_NOT_SUPPORT';
|
||||||
|
AxiosError.ERR_INVALID_URL = 'ERR_INVALID_URL';
|
||||||
|
|
||||||
|
export default AxiosError;
|
||||||
314
server/node_modules/axios/lib/core/AxiosHeaders.js
generated
vendored
Normal file
314
server/node_modules/axios/lib/core/AxiosHeaders.js
generated
vendored
Normal file
@@ -0,0 +1,314 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import utils from '../utils.js';
|
||||||
|
import parseHeaders from '../helpers/parseHeaders.js';
|
||||||
|
|
||||||
|
const $internals = Symbol('internals');
|
||||||
|
|
||||||
|
function normalizeHeader(header) {
|
||||||
|
return header && String(header).trim().toLowerCase();
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeValue(value) {
|
||||||
|
if (value === false || value == null) {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
return utils.isArray(value) ? value.map(normalizeValue) : String(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseTokens(str) {
|
||||||
|
const tokens = Object.create(null);
|
||||||
|
const tokensRE = /([^\s,;=]+)\s*(?:=\s*([^,;]+))?/g;
|
||||||
|
let match;
|
||||||
|
|
||||||
|
while ((match = tokensRE.exec(str))) {
|
||||||
|
tokens[match[1]] = match[2];
|
||||||
|
}
|
||||||
|
|
||||||
|
return tokens;
|
||||||
|
}
|
||||||
|
|
||||||
|
const isValidHeaderName = (str) => /^[-_a-zA-Z0-9^`|~,!#$%&'*+.]+$/.test(str.trim());
|
||||||
|
|
||||||
|
function matchHeaderValue(context, value, header, filter, isHeaderNameFilter) {
|
||||||
|
if (utils.isFunction(filter)) {
|
||||||
|
return filter.call(this, value, header);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isHeaderNameFilter) {
|
||||||
|
value = header;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!utils.isString(value)) return;
|
||||||
|
|
||||||
|
if (utils.isString(filter)) {
|
||||||
|
return value.indexOf(filter) !== -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (utils.isRegExp(filter)) {
|
||||||
|
return filter.test(value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatHeader(header) {
|
||||||
|
return header.trim()
|
||||||
|
.toLowerCase().replace(/([a-z\d])(\w*)/g, (w, char, str) => {
|
||||||
|
return char.toUpperCase() + str;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildAccessors(obj, header) {
|
||||||
|
const accessorName = utils.toCamelCase(' ' + header);
|
||||||
|
|
||||||
|
['get', 'set', 'has'].forEach(methodName => {
|
||||||
|
Object.defineProperty(obj, methodName + accessorName, {
|
||||||
|
value: function(arg1, arg2, arg3) {
|
||||||
|
return this[methodName].call(this, header, arg1, arg2, arg3);
|
||||||
|
},
|
||||||
|
configurable: true
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
class AxiosHeaders {
|
||||||
|
constructor(headers) {
|
||||||
|
headers && this.set(headers);
|
||||||
|
}
|
||||||
|
|
||||||
|
set(header, valueOrRewrite, rewrite) {
|
||||||
|
const self = this;
|
||||||
|
|
||||||
|
function setHeader(_value, _header, _rewrite) {
|
||||||
|
const lHeader = normalizeHeader(_header);
|
||||||
|
|
||||||
|
if (!lHeader) {
|
||||||
|
throw new Error('header name must be a non-empty string');
|
||||||
|
}
|
||||||
|
|
||||||
|
const key = utils.findKey(self, lHeader);
|
||||||
|
|
||||||
|
if(!key || self[key] === undefined || _rewrite === true || (_rewrite === undefined && self[key] !== false)) {
|
||||||
|
self[key || _header] = normalizeValue(_value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const setHeaders = (headers, _rewrite) =>
|
||||||
|
utils.forEach(headers, (_value, _header) => setHeader(_value, _header, _rewrite));
|
||||||
|
|
||||||
|
if (utils.isPlainObject(header) || header instanceof this.constructor) {
|
||||||
|
setHeaders(header, valueOrRewrite)
|
||||||
|
} else if(utils.isString(header) && (header = header.trim()) && !isValidHeaderName(header)) {
|
||||||
|
setHeaders(parseHeaders(header), valueOrRewrite);
|
||||||
|
} else if (utils.isObject(header) && utils.isIterable(header)) {
|
||||||
|
let obj = {}, dest, key;
|
||||||
|
for (const entry of header) {
|
||||||
|
if (!utils.isArray(entry)) {
|
||||||
|
throw TypeError('Object iterator must return a key-value pair');
|
||||||
|
}
|
||||||
|
|
||||||
|
obj[key = entry[0]] = (dest = obj[key]) ?
|
||||||
|
(utils.isArray(dest) ? [...dest, entry[1]] : [dest, entry[1]]) : entry[1];
|
||||||
|
}
|
||||||
|
|
||||||
|
setHeaders(obj, valueOrRewrite)
|
||||||
|
} else {
|
||||||
|
header != null && setHeader(valueOrRewrite, header, rewrite);
|
||||||
|
}
|
||||||
|
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
get(header, parser) {
|
||||||
|
header = normalizeHeader(header);
|
||||||
|
|
||||||
|
if (header) {
|
||||||
|
const key = utils.findKey(this, header);
|
||||||
|
|
||||||
|
if (key) {
|
||||||
|
const value = this[key];
|
||||||
|
|
||||||
|
if (!parser) {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parser === true) {
|
||||||
|
return parseTokens(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (utils.isFunction(parser)) {
|
||||||
|
return parser.call(this, value, key);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (utils.isRegExp(parser)) {
|
||||||
|
return parser.exec(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new TypeError('parser must be boolean|regexp|function');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
has(header, matcher) {
|
||||||
|
header = normalizeHeader(header);
|
||||||
|
|
||||||
|
if (header) {
|
||||||
|
const key = utils.findKey(this, header);
|
||||||
|
|
||||||
|
return !!(key && this[key] !== undefined && (!matcher || matchHeaderValue(this, this[key], key, matcher)));
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
delete(header, matcher) {
|
||||||
|
const self = this;
|
||||||
|
let deleted = false;
|
||||||
|
|
||||||
|
function deleteHeader(_header) {
|
||||||
|
_header = normalizeHeader(_header);
|
||||||
|
|
||||||
|
if (_header) {
|
||||||
|
const key = utils.findKey(self, _header);
|
||||||
|
|
||||||
|
if (key && (!matcher || matchHeaderValue(self, self[key], key, matcher))) {
|
||||||
|
delete self[key];
|
||||||
|
|
||||||
|
deleted = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (utils.isArray(header)) {
|
||||||
|
header.forEach(deleteHeader);
|
||||||
|
} else {
|
||||||
|
deleteHeader(header);
|
||||||
|
}
|
||||||
|
|
||||||
|
return deleted;
|
||||||
|
}
|
||||||
|
|
||||||
|
clear(matcher) {
|
||||||
|
const keys = Object.keys(this);
|
||||||
|
let i = keys.length;
|
||||||
|
let deleted = false;
|
||||||
|
|
||||||
|
while (i--) {
|
||||||
|
const key = keys[i];
|
||||||
|
if(!matcher || matchHeaderValue(this, this[key], key, matcher, true)) {
|
||||||
|
delete this[key];
|
||||||
|
deleted = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return deleted;
|
||||||
|
}
|
||||||
|
|
||||||
|
normalize(format) {
|
||||||
|
const self = this;
|
||||||
|
const headers = {};
|
||||||
|
|
||||||
|
utils.forEach(this, (value, header) => {
|
||||||
|
const key = utils.findKey(headers, header);
|
||||||
|
|
||||||
|
if (key) {
|
||||||
|
self[key] = normalizeValue(value);
|
||||||
|
delete self[header];
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalized = format ? formatHeader(header) : String(header).trim();
|
||||||
|
|
||||||
|
if (normalized !== header) {
|
||||||
|
delete self[header];
|
||||||
|
}
|
||||||
|
|
||||||
|
self[normalized] = normalizeValue(value);
|
||||||
|
|
||||||
|
headers[normalized] = true;
|
||||||
|
});
|
||||||
|
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
concat(...targets) {
|
||||||
|
return this.constructor.concat(this, ...targets);
|
||||||
|
}
|
||||||
|
|
||||||
|
toJSON(asStrings) {
|
||||||
|
const obj = Object.create(null);
|
||||||
|
|
||||||
|
utils.forEach(this, (value, header) => {
|
||||||
|
value != null && value !== false && (obj[header] = asStrings && utils.isArray(value) ? value.join(', ') : value);
|
||||||
|
});
|
||||||
|
|
||||||
|
return obj;
|
||||||
|
}
|
||||||
|
|
||||||
|
[Symbol.iterator]() {
|
||||||
|
return Object.entries(this.toJSON())[Symbol.iterator]();
|
||||||
|
}
|
||||||
|
|
||||||
|
toString() {
|
||||||
|
return Object.entries(this.toJSON()).map(([header, value]) => header + ': ' + value).join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
getSetCookie() {
|
||||||
|
return this.get("set-cookie") || [];
|
||||||
|
}
|
||||||
|
|
||||||
|
get [Symbol.toStringTag]() {
|
||||||
|
return 'AxiosHeaders';
|
||||||
|
}
|
||||||
|
|
||||||
|
static from(thing) {
|
||||||
|
return thing instanceof this ? thing : new this(thing);
|
||||||
|
}
|
||||||
|
|
||||||
|
static concat(first, ...targets) {
|
||||||
|
const computed = new this(first);
|
||||||
|
|
||||||
|
targets.forEach((target) => computed.set(target));
|
||||||
|
|
||||||
|
return computed;
|
||||||
|
}
|
||||||
|
|
||||||
|
static accessor(header) {
|
||||||
|
const internals = this[$internals] = (this[$internals] = {
|
||||||
|
accessors: {}
|
||||||
|
});
|
||||||
|
|
||||||
|
const accessors = internals.accessors;
|
||||||
|
const prototype = this.prototype;
|
||||||
|
|
||||||
|
function defineAccessor(_header) {
|
||||||
|
const lHeader = normalizeHeader(_header);
|
||||||
|
|
||||||
|
if (!accessors[lHeader]) {
|
||||||
|
buildAccessors(prototype, _header);
|
||||||
|
accessors[lHeader] = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
utils.isArray(header) ? header.forEach(defineAccessor) : defineAccessor(header);
|
||||||
|
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
AxiosHeaders.accessor(['Content-Type', 'Content-Length', 'Accept', 'Accept-Encoding', 'User-Agent', 'Authorization']);
|
||||||
|
|
||||||
|
// reserved names hotfix
|
||||||
|
utils.reduceDescriptors(AxiosHeaders.prototype, ({value}, key) => {
|
||||||
|
let mapped = key[0].toUpperCase() + key.slice(1); // map `set` => `Set`
|
||||||
|
return {
|
||||||
|
get: () => value,
|
||||||
|
set(headerValue) {
|
||||||
|
this[mapped] = headerValue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
utils.freezeMethods(AxiosHeaders);
|
||||||
|
|
||||||
|
export default AxiosHeaders;
|
||||||
72
server/node_modules/axios/lib/core/InterceptorManager.js
generated
vendored
Normal file
72
server/node_modules/axios/lib/core/InterceptorManager.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import utils from '../utils.js';
|
||||||
|
|
||||||
|
class InterceptorManager {
|
||||||
|
constructor() {
|
||||||
|
this.handlers = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a new interceptor to the stack
|
||||||
|
*
|
||||||
|
* @param {Function} fulfilled The function to handle `then` for a `Promise`
|
||||||
|
* @param {Function} rejected The function to handle `reject` for a `Promise`
|
||||||
|
* @param {Object} options The options for the interceptor, synchronous and runWhen
|
||||||
|
*
|
||||||
|
* @return {Number} An ID used to remove interceptor later
|
||||||
|
*/
|
||||||
|
use(fulfilled, rejected, options) {
|
||||||
|
this.handlers.push({
|
||||||
|
fulfilled,
|
||||||
|
rejected,
|
||||||
|
synchronous: options ? options.synchronous : false,
|
||||||
|
runWhen: options ? options.runWhen : null
|
||||||
|
});
|
||||||
|
return this.handlers.length - 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove an interceptor from the stack
|
||||||
|
*
|
||||||
|
* @param {Number} id The ID that was returned by `use`
|
||||||
|
*
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
eject(id) {
|
||||||
|
if (this.handlers[id]) {
|
||||||
|
this.handlers[id] = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear all interceptors from the stack
|
||||||
|
*
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
clear() {
|
||||||
|
if (this.handlers) {
|
||||||
|
this.handlers = [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Iterate over all the registered interceptors
|
||||||
|
*
|
||||||
|
* This method is particularly useful for skipping over any
|
||||||
|
* interceptors that may have become `null` calling `eject`.
|
||||||
|
*
|
||||||
|
* @param {Function} fn The function to call for each interceptor
|
||||||
|
*
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
forEach(fn) {
|
||||||
|
utils.forEach(this.handlers, function forEachHandler(h) {
|
||||||
|
if (h !== null) {
|
||||||
|
fn(h);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default InterceptorManager;
|
||||||
8
server/node_modules/axios/lib/core/README.md
generated
vendored
Normal file
8
server/node_modules/axios/lib/core/README.md
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
# axios // core
|
||||||
|
|
||||||
|
The modules found in `core/` should be modules that are specific to the domain logic of axios. These modules would most likely not make sense to be consumed outside of the axios module, as their logic is too specific. Some examples of core modules are:
|
||||||
|
|
||||||
|
- Dispatching requests
|
||||||
|
- Requests sent via `adapters/` (see lib/adapters/README.md)
|
||||||
|
- Managing interceptors
|
||||||
|
- Handling config
|
||||||
22
server/node_modules/axios/lib/core/buildFullPath.js
generated
vendored
Normal file
22
server/node_modules/axios/lib/core/buildFullPath.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import isAbsoluteURL from '../helpers/isAbsoluteURL.js';
|
||||||
|
import combineURLs from '../helpers/combineURLs.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new URL by combining the baseURL with the requestedURL,
|
||||||
|
* only when the requestedURL is not already an absolute URL.
|
||||||
|
* If the requestURL is absolute, this function returns the requestedURL untouched.
|
||||||
|
*
|
||||||
|
* @param {string} baseURL The base URL
|
||||||
|
* @param {string} requestedURL Absolute or relative URL to combine
|
||||||
|
*
|
||||||
|
* @returns {string} The combined full path
|
||||||
|
*/
|
||||||
|
export default function buildFullPath(baseURL, requestedURL, allowAbsoluteUrls) {
|
||||||
|
let isRelativeUrl = !isAbsoluteURL(requestedURL);
|
||||||
|
if (baseURL && (isRelativeUrl || allowAbsoluteUrls == false)) {
|
||||||
|
return combineURLs(baseURL, requestedURL);
|
||||||
|
}
|
||||||
|
return requestedURL;
|
||||||
|
}
|
||||||
81
server/node_modules/axios/lib/core/dispatchRequest.js
generated
vendored
Normal file
81
server/node_modules/axios/lib/core/dispatchRequest.js
generated
vendored
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import transformData from './transformData.js';
|
||||||
|
import isCancel from '../cancel/isCancel.js';
|
||||||
|
import defaults from '../defaults/index.js';
|
||||||
|
import CanceledError from '../cancel/CanceledError.js';
|
||||||
|
import AxiosHeaders from '../core/AxiosHeaders.js';
|
||||||
|
import adapters from "../adapters/adapters.js";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Throws a `CanceledError` if cancellation has been requested.
|
||||||
|
*
|
||||||
|
* @param {Object} config The config that is to be used for the request
|
||||||
|
*
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
function throwIfCancellationRequested(config) {
|
||||||
|
if (config.cancelToken) {
|
||||||
|
config.cancelToken.throwIfRequested();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (config.signal && config.signal.aborted) {
|
||||||
|
throw new CanceledError(null, config);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Dispatch a request to the server using the configured adapter.
|
||||||
|
*
|
||||||
|
* @param {object} config The config that is to be used for the request
|
||||||
|
*
|
||||||
|
* @returns {Promise} The Promise to be fulfilled
|
||||||
|
*/
|
||||||
|
export default function dispatchRequest(config) {
|
||||||
|
throwIfCancellationRequested(config);
|
||||||
|
|
||||||
|
config.headers = AxiosHeaders.from(config.headers);
|
||||||
|
|
||||||
|
// Transform request data
|
||||||
|
config.data = transformData.call(
|
||||||
|
config,
|
||||||
|
config.transformRequest
|
||||||
|
);
|
||||||
|
|
||||||
|
if (['post', 'put', 'patch'].indexOf(config.method) !== -1) {
|
||||||
|
config.headers.setContentType('application/x-www-form-urlencoded', false);
|
||||||
|
}
|
||||||
|
|
||||||
|
const adapter = adapters.getAdapter(config.adapter || defaults.adapter, config);
|
||||||
|
|
||||||
|
return adapter(config).then(function onAdapterResolution(response) {
|
||||||
|
throwIfCancellationRequested(config);
|
||||||
|
|
||||||
|
// Transform response data
|
||||||
|
response.data = transformData.call(
|
||||||
|
config,
|
||||||
|
config.transformResponse,
|
||||||
|
response
|
||||||
|
);
|
||||||
|
|
||||||
|
response.headers = AxiosHeaders.from(response.headers);
|
||||||
|
|
||||||
|
return response;
|
||||||
|
}, function onAdapterRejection(reason) {
|
||||||
|
if (!isCancel(reason)) {
|
||||||
|
throwIfCancellationRequested(config);
|
||||||
|
|
||||||
|
// Transform response data
|
||||||
|
if (reason && reason.response) {
|
||||||
|
reason.response.data = transformData.call(
|
||||||
|
config,
|
||||||
|
config.transformResponse,
|
||||||
|
reason.response
|
||||||
|
);
|
||||||
|
reason.response.headers = AxiosHeaders.from(reason.response.headers);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return Promise.reject(reason);
|
||||||
|
});
|
||||||
|
}
|
||||||
119
server/node_modules/axios/lib/core/mergeConfig.js
generated
vendored
Normal file
119
server/node_modules/axios/lib/core/mergeConfig.js
generated
vendored
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
import utils from "../utils.js";
|
||||||
|
import AxiosHeaders from "./AxiosHeaders.js";
|
||||||
|
|
||||||
|
const headersToObject = (thing) =>
|
||||||
|
thing instanceof AxiosHeaders ? { ...thing } : thing;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Config-specific merge-function which creates a new config-object
|
||||||
|
* by merging two configuration objects together.
|
||||||
|
*
|
||||||
|
* @param {Object} config1
|
||||||
|
* @param {Object} config2
|
||||||
|
*
|
||||||
|
* @returns {Object} New object resulting from merging config2 to config1
|
||||||
|
*/
|
||||||
|
export default function mergeConfig(config1, config2) {
|
||||||
|
// eslint-disable-next-line no-param-reassign
|
||||||
|
config2 = config2 || {};
|
||||||
|
const config = {};
|
||||||
|
|
||||||
|
function getMergedValue(target, source, prop, caseless) {
|
||||||
|
if (utils.isPlainObject(target) && utils.isPlainObject(source)) {
|
||||||
|
return utils.merge.call({ caseless }, target, source);
|
||||||
|
} else if (utils.isPlainObject(source)) {
|
||||||
|
return utils.merge({}, source);
|
||||||
|
} else if (utils.isArray(source)) {
|
||||||
|
return source.slice();
|
||||||
|
}
|
||||||
|
return source;
|
||||||
|
}
|
||||||
|
|
||||||
|
function mergeDeepProperties(a, b, prop, caseless) {
|
||||||
|
if (!utils.isUndefined(b)) {
|
||||||
|
return getMergedValue(a, b, prop, caseless);
|
||||||
|
} else if (!utils.isUndefined(a)) {
|
||||||
|
return getMergedValue(undefined, a, prop, caseless);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// eslint-disable-next-line consistent-return
|
||||||
|
function valueFromConfig2(a, b) {
|
||||||
|
if (!utils.isUndefined(b)) {
|
||||||
|
return getMergedValue(undefined, b);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// eslint-disable-next-line consistent-return
|
||||||
|
function defaultToConfig2(a, b) {
|
||||||
|
if (!utils.isUndefined(b)) {
|
||||||
|
return getMergedValue(undefined, b);
|
||||||
|
} else if (!utils.isUndefined(a)) {
|
||||||
|
return getMergedValue(undefined, a);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// eslint-disable-next-line consistent-return
|
||||||
|
function mergeDirectKeys(a, b, prop) {
|
||||||
|
if (prop in config2) {
|
||||||
|
return getMergedValue(a, b);
|
||||||
|
} else if (prop in config1) {
|
||||||
|
return getMergedValue(undefined, a);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const mergeMap = {
|
||||||
|
url: valueFromConfig2,
|
||||||
|
method: valueFromConfig2,
|
||||||
|
data: valueFromConfig2,
|
||||||
|
baseURL: defaultToConfig2,
|
||||||
|
transformRequest: defaultToConfig2,
|
||||||
|
transformResponse: defaultToConfig2,
|
||||||
|
paramsSerializer: defaultToConfig2,
|
||||||
|
timeout: defaultToConfig2,
|
||||||
|
timeoutMessage: defaultToConfig2,
|
||||||
|
withCredentials: defaultToConfig2,
|
||||||
|
withXSRFToken: defaultToConfig2,
|
||||||
|
adapter: defaultToConfig2,
|
||||||
|
responseType: defaultToConfig2,
|
||||||
|
xsrfCookieName: defaultToConfig2,
|
||||||
|
xsrfHeaderName: defaultToConfig2,
|
||||||
|
onUploadProgress: defaultToConfig2,
|
||||||
|
onDownloadProgress: defaultToConfig2,
|
||||||
|
decompress: defaultToConfig2,
|
||||||
|
maxContentLength: defaultToConfig2,
|
||||||
|
maxBodyLength: defaultToConfig2,
|
||||||
|
beforeRedirect: defaultToConfig2,
|
||||||
|
transport: defaultToConfig2,
|
||||||
|
httpAgent: defaultToConfig2,
|
||||||
|
httpsAgent: defaultToConfig2,
|
||||||
|
cancelToken: defaultToConfig2,
|
||||||
|
socketPath: defaultToConfig2,
|
||||||
|
responseEncoding: defaultToConfig2,
|
||||||
|
validateStatus: mergeDirectKeys,
|
||||||
|
headers: (a, b, prop) =>
|
||||||
|
mergeDeepProperties(headersToObject(a), headersToObject(b), prop, true),
|
||||||
|
};
|
||||||
|
|
||||||
|
utils.forEach(
|
||||||
|
Object.keys({ ...config1, ...config2 }),
|
||||||
|
function computeConfigValue(prop) {
|
||||||
|
if (
|
||||||
|
prop === "__proto__" ||
|
||||||
|
prop === "constructor" ||
|
||||||
|
prop === "prototype"
|
||||||
|
)
|
||||||
|
return;
|
||||||
|
const merge = utils.hasOwnProp(mergeMap, prop)
|
||||||
|
? mergeMap[prop]
|
||||||
|
: mergeDeepProperties;
|
||||||
|
const configValue = merge(config1[prop], config2[prop], prop);
|
||||||
|
(utils.isUndefined(configValue) && merge !== mergeDirectKeys) ||
|
||||||
|
(config[prop] = configValue);
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
return config;
|
||||||
|
}
|
||||||
27
server/node_modules/axios/lib/core/settle.js
generated
vendored
Normal file
27
server/node_modules/axios/lib/core/settle.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import AxiosError from './AxiosError.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve or reject a Promise based on response status.
|
||||||
|
*
|
||||||
|
* @param {Function} resolve A function that resolves the promise.
|
||||||
|
* @param {Function} reject A function that rejects the promise.
|
||||||
|
* @param {object} response The response.
|
||||||
|
*
|
||||||
|
* @returns {object} The response.
|
||||||
|
*/
|
||||||
|
export default function settle(resolve, reject, response) {
|
||||||
|
const validateStatus = response.config.validateStatus;
|
||||||
|
if (!response.status || !validateStatus || validateStatus(response.status)) {
|
||||||
|
resolve(response);
|
||||||
|
} else {
|
||||||
|
reject(new AxiosError(
|
||||||
|
'Request failed with status code ' + response.status,
|
||||||
|
[AxiosError.ERR_BAD_REQUEST, AxiosError.ERR_BAD_RESPONSE][Math.floor(response.status / 100) - 4],
|
||||||
|
response.config,
|
||||||
|
response.request,
|
||||||
|
response
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
28
server/node_modules/axios/lib/core/transformData.js
generated
vendored
Normal file
28
server/node_modules/axios/lib/core/transformData.js
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import utils from '../utils.js';
|
||||||
|
import defaults from '../defaults/index.js';
|
||||||
|
import AxiosHeaders from '../core/AxiosHeaders.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transform the data for a request or a response
|
||||||
|
*
|
||||||
|
* @param {Array|Function} fns A single function or Array of functions
|
||||||
|
* @param {?Object} response The response object
|
||||||
|
*
|
||||||
|
* @returns {*} The resulting transformed data
|
||||||
|
*/
|
||||||
|
export default function transformData(fns, response) {
|
||||||
|
const config = this || defaults;
|
||||||
|
const context = response || config;
|
||||||
|
const headers = AxiosHeaders.from(context.headers);
|
||||||
|
let data = context.data;
|
||||||
|
|
||||||
|
utils.forEach(fns, function transform(fn) {
|
||||||
|
data = fn.call(config, data, headers.normalize(), response ? response.status : undefined);
|
||||||
|
});
|
||||||
|
|
||||||
|
headers.normalize();
|
||||||
|
|
||||||
|
return data;
|
||||||
|
}
|
||||||
161
server/node_modules/axios/lib/defaults/index.js
generated
vendored
Normal file
161
server/node_modules/axios/lib/defaults/index.js
generated
vendored
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import utils from '../utils.js';
|
||||||
|
import AxiosError from '../core/AxiosError.js';
|
||||||
|
import transitionalDefaults from './transitional.js';
|
||||||
|
import toFormData from '../helpers/toFormData.js';
|
||||||
|
import toURLEncodedForm from '../helpers/toURLEncodedForm.js';
|
||||||
|
import platform from '../platform/index.js';
|
||||||
|
import formDataToJSON from '../helpers/formDataToJSON.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* It takes a string, tries to parse it, and if it fails, it returns the stringified version
|
||||||
|
* of the input
|
||||||
|
*
|
||||||
|
* @param {any} rawValue - The value to be stringified.
|
||||||
|
* @param {Function} parser - A function that parses a string into a JavaScript object.
|
||||||
|
* @param {Function} encoder - A function that takes a value and returns a string.
|
||||||
|
*
|
||||||
|
* @returns {string} A stringified version of the rawValue.
|
||||||
|
*/
|
||||||
|
function stringifySafely(rawValue, parser, encoder) {
|
||||||
|
if (utils.isString(rawValue)) {
|
||||||
|
try {
|
||||||
|
(parser || JSON.parse)(rawValue);
|
||||||
|
return utils.trim(rawValue);
|
||||||
|
} catch (e) {
|
||||||
|
if (e.name !== 'SyntaxError') {
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (encoder || JSON.stringify)(rawValue);
|
||||||
|
}
|
||||||
|
|
||||||
|
const defaults = {
|
||||||
|
|
||||||
|
transitional: transitionalDefaults,
|
||||||
|
|
||||||
|
adapter: ['xhr', 'http', 'fetch'],
|
||||||
|
|
||||||
|
transformRequest: [function transformRequest(data, headers) {
|
||||||
|
const contentType = headers.getContentType() || '';
|
||||||
|
const hasJSONContentType = contentType.indexOf('application/json') > -1;
|
||||||
|
const isObjectPayload = utils.isObject(data);
|
||||||
|
|
||||||
|
if (isObjectPayload && utils.isHTMLForm(data)) {
|
||||||
|
data = new FormData(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
const isFormData = utils.isFormData(data);
|
||||||
|
|
||||||
|
if (isFormData) {
|
||||||
|
return hasJSONContentType ? JSON.stringify(formDataToJSON(data)) : data;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (utils.isArrayBuffer(data) ||
|
||||||
|
utils.isBuffer(data) ||
|
||||||
|
utils.isStream(data) ||
|
||||||
|
utils.isFile(data) ||
|
||||||
|
utils.isBlob(data) ||
|
||||||
|
utils.isReadableStream(data)
|
||||||
|
) {
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
if (utils.isArrayBufferView(data)) {
|
||||||
|
return data.buffer;
|
||||||
|
}
|
||||||
|
if (utils.isURLSearchParams(data)) {
|
||||||
|
headers.setContentType('application/x-www-form-urlencoded;charset=utf-8', false);
|
||||||
|
return data.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
let isFileList;
|
||||||
|
|
||||||
|
if (isObjectPayload) {
|
||||||
|
if (contentType.indexOf('application/x-www-form-urlencoded') > -1) {
|
||||||
|
return toURLEncodedForm(data, this.formSerializer).toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
if ((isFileList = utils.isFileList(data)) || contentType.indexOf('multipart/form-data') > -1) {
|
||||||
|
const _FormData = this.env && this.env.FormData;
|
||||||
|
|
||||||
|
return toFormData(
|
||||||
|
isFileList ? {'files[]': data} : data,
|
||||||
|
_FormData && new _FormData(),
|
||||||
|
this.formSerializer
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isObjectPayload || hasJSONContentType ) {
|
||||||
|
headers.setContentType('application/json', false);
|
||||||
|
return stringifySafely(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
return data;
|
||||||
|
}],
|
||||||
|
|
||||||
|
transformResponse: [function transformResponse(data) {
|
||||||
|
const transitional = this.transitional || defaults.transitional;
|
||||||
|
const forcedJSONParsing = transitional && transitional.forcedJSONParsing;
|
||||||
|
const JSONRequested = this.responseType === 'json';
|
||||||
|
|
||||||
|
if (utils.isResponse(data) || utils.isReadableStream(data)) {
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data && utils.isString(data) && ((forcedJSONParsing && !this.responseType) || JSONRequested)) {
|
||||||
|
const silentJSONParsing = transitional && transitional.silentJSONParsing;
|
||||||
|
const strictJSONParsing = !silentJSONParsing && JSONRequested;
|
||||||
|
|
||||||
|
try {
|
||||||
|
return JSON.parse(data, this.parseReviver);
|
||||||
|
} catch (e) {
|
||||||
|
if (strictJSONParsing) {
|
||||||
|
if (e.name === 'SyntaxError') {
|
||||||
|
throw AxiosError.from(e, AxiosError.ERR_BAD_RESPONSE, this, null, this.response);
|
||||||
|
}
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return data;
|
||||||
|
}],
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A timeout in milliseconds to abort a request. If set to 0 (default) a
|
||||||
|
* timeout is not created.
|
||||||
|
*/
|
||||||
|
timeout: 0,
|
||||||
|
|
||||||
|
xsrfCookieName: 'XSRF-TOKEN',
|
||||||
|
xsrfHeaderName: 'X-XSRF-TOKEN',
|
||||||
|
|
||||||
|
maxContentLength: -1,
|
||||||
|
maxBodyLength: -1,
|
||||||
|
|
||||||
|
env: {
|
||||||
|
FormData: platform.classes.FormData,
|
||||||
|
Blob: platform.classes.Blob
|
||||||
|
},
|
||||||
|
|
||||||
|
validateStatus: function validateStatus(status) {
|
||||||
|
return status >= 200 && status < 300;
|
||||||
|
},
|
||||||
|
|
||||||
|
headers: {
|
||||||
|
common: {
|
||||||
|
'Accept': 'application/json, text/plain, */*',
|
||||||
|
'Content-Type': undefined
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
utils.forEach(['delete', 'get', 'head', 'post', 'put', 'patch'], (method) => {
|
||||||
|
defaults.headers[method] = {};
|
||||||
|
});
|
||||||
|
|
||||||
|
export default defaults;
|
||||||
8
server/node_modules/axios/lib/defaults/transitional.js
generated
vendored
Normal file
8
server/node_modules/axios/lib/defaults/transitional.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
export default {
|
||||||
|
silentJSONParsing: true,
|
||||||
|
forcedJSONParsing: true,
|
||||||
|
clarifyTimeoutError: false,
|
||||||
|
legacyInterceptorReqResOrdering: true
|
||||||
|
};
|
||||||
3
server/node_modules/axios/lib/env/README.md
generated
vendored
Normal file
3
server/node_modules/axios/lib/env/README.md
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
# axios // env
|
||||||
|
|
||||||
|
The `data.js` file is updated automatically when the package version is upgrading. Please do not edit it manually.
|
||||||
2
server/node_modules/axios/lib/env/classes/FormData.js
generated
vendored
Normal file
2
server/node_modules/axios/lib/env/classes/FormData.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
import _FormData from 'form-data';
|
||||||
|
export default typeof FormData !== 'undefined' ? FormData : _FormData;
|
||||||
1
server/node_modules/axios/lib/env/data.js
generated
vendored
Normal file
1
server/node_modules/axios/lib/env/data.js
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export const VERSION = "1.13.5";
|
||||||
143
server/node_modules/axios/lib/helpers/AxiosTransformStream.js
generated
vendored
Normal file
143
server/node_modules/axios/lib/helpers/AxiosTransformStream.js
generated
vendored
Normal file
@@ -0,0 +1,143 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import stream from 'stream';
|
||||||
|
import utils from '../utils.js';
|
||||||
|
|
||||||
|
const kInternals = Symbol('internals');
|
||||||
|
|
||||||
|
class AxiosTransformStream extends stream.Transform{
|
||||||
|
constructor(options) {
|
||||||
|
options = utils.toFlatObject(options, {
|
||||||
|
maxRate: 0,
|
||||||
|
chunkSize: 64 * 1024,
|
||||||
|
minChunkSize: 100,
|
||||||
|
timeWindow: 500,
|
||||||
|
ticksRate: 2,
|
||||||
|
samplesCount: 15
|
||||||
|
}, null, (prop, source) => {
|
||||||
|
return !utils.isUndefined(source[prop]);
|
||||||
|
});
|
||||||
|
|
||||||
|
super({
|
||||||
|
readableHighWaterMark: options.chunkSize
|
||||||
|
});
|
||||||
|
|
||||||
|
const internals = this[kInternals] = {
|
||||||
|
timeWindow: options.timeWindow,
|
||||||
|
chunkSize: options.chunkSize,
|
||||||
|
maxRate: options.maxRate,
|
||||||
|
minChunkSize: options.minChunkSize,
|
||||||
|
bytesSeen: 0,
|
||||||
|
isCaptured: false,
|
||||||
|
notifiedBytesLoaded: 0,
|
||||||
|
ts: Date.now(),
|
||||||
|
bytes: 0,
|
||||||
|
onReadCallback: null
|
||||||
|
};
|
||||||
|
|
||||||
|
this.on('newListener', event => {
|
||||||
|
if (event === 'progress') {
|
||||||
|
if (!internals.isCaptured) {
|
||||||
|
internals.isCaptured = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
_read(size) {
|
||||||
|
const internals = this[kInternals];
|
||||||
|
|
||||||
|
if (internals.onReadCallback) {
|
||||||
|
internals.onReadCallback();
|
||||||
|
}
|
||||||
|
|
||||||
|
return super._read(size);
|
||||||
|
}
|
||||||
|
|
||||||
|
_transform(chunk, encoding, callback) {
|
||||||
|
const internals = this[kInternals];
|
||||||
|
const maxRate = internals.maxRate;
|
||||||
|
|
||||||
|
const readableHighWaterMark = this.readableHighWaterMark;
|
||||||
|
|
||||||
|
const timeWindow = internals.timeWindow;
|
||||||
|
|
||||||
|
const divider = 1000 / timeWindow;
|
||||||
|
const bytesThreshold = (maxRate / divider);
|
||||||
|
const minChunkSize = internals.minChunkSize !== false ? Math.max(internals.minChunkSize, bytesThreshold * 0.01) : 0;
|
||||||
|
|
||||||
|
const pushChunk = (_chunk, _callback) => {
|
||||||
|
const bytes = Buffer.byteLength(_chunk);
|
||||||
|
internals.bytesSeen += bytes;
|
||||||
|
internals.bytes += bytes;
|
||||||
|
|
||||||
|
internals.isCaptured && this.emit('progress', internals.bytesSeen);
|
||||||
|
|
||||||
|
if (this.push(_chunk)) {
|
||||||
|
process.nextTick(_callback);
|
||||||
|
} else {
|
||||||
|
internals.onReadCallback = () => {
|
||||||
|
internals.onReadCallback = null;
|
||||||
|
process.nextTick(_callback);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const transformChunk = (_chunk, _callback) => {
|
||||||
|
const chunkSize = Buffer.byteLength(_chunk);
|
||||||
|
let chunkRemainder = null;
|
||||||
|
let maxChunkSize = readableHighWaterMark;
|
||||||
|
let bytesLeft;
|
||||||
|
let passed = 0;
|
||||||
|
|
||||||
|
if (maxRate) {
|
||||||
|
const now = Date.now();
|
||||||
|
|
||||||
|
if (!internals.ts || (passed = (now - internals.ts)) >= timeWindow) {
|
||||||
|
internals.ts = now;
|
||||||
|
bytesLeft = bytesThreshold - internals.bytes;
|
||||||
|
internals.bytes = bytesLeft < 0 ? -bytesLeft : 0;
|
||||||
|
passed = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
bytesLeft = bytesThreshold - internals.bytes;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (maxRate) {
|
||||||
|
if (bytesLeft <= 0) {
|
||||||
|
// next time window
|
||||||
|
return setTimeout(() => {
|
||||||
|
_callback(null, _chunk);
|
||||||
|
}, timeWindow - passed);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (bytesLeft < maxChunkSize) {
|
||||||
|
maxChunkSize = bytesLeft;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (maxChunkSize && chunkSize > maxChunkSize && (chunkSize - maxChunkSize) > minChunkSize) {
|
||||||
|
chunkRemainder = _chunk.subarray(maxChunkSize);
|
||||||
|
_chunk = _chunk.subarray(0, maxChunkSize);
|
||||||
|
}
|
||||||
|
|
||||||
|
pushChunk(_chunk, chunkRemainder ? () => {
|
||||||
|
process.nextTick(_callback, null, chunkRemainder);
|
||||||
|
} : _callback);
|
||||||
|
};
|
||||||
|
|
||||||
|
transformChunk(chunk, function transformNextChunk(err, _chunk) {
|
||||||
|
if (err) {
|
||||||
|
return callback(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (_chunk) {
|
||||||
|
transformChunk(_chunk, transformNextChunk);
|
||||||
|
} else {
|
||||||
|
callback(null);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default AxiosTransformStream;
|
||||||
58
server/node_modules/axios/lib/helpers/AxiosURLSearchParams.js
generated
vendored
Normal file
58
server/node_modules/axios/lib/helpers/AxiosURLSearchParams.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import toFormData from './toFormData.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* It encodes a string by replacing all characters that are not in the unreserved set with
|
||||||
|
* their percent-encoded equivalents
|
||||||
|
*
|
||||||
|
* @param {string} str - The string to encode.
|
||||||
|
*
|
||||||
|
* @returns {string} The encoded string.
|
||||||
|
*/
|
||||||
|
function encode(str) {
|
||||||
|
const charMap = {
|
||||||
|
'!': '%21',
|
||||||
|
"'": '%27',
|
||||||
|
'(': '%28',
|
||||||
|
')': '%29',
|
||||||
|
'~': '%7E',
|
||||||
|
'%20': '+',
|
||||||
|
'%00': '\x00'
|
||||||
|
};
|
||||||
|
return encodeURIComponent(str).replace(/[!'()~]|%20|%00/g, function replacer(match) {
|
||||||
|
return charMap[match];
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* It takes a params object and converts it to a FormData object
|
||||||
|
*
|
||||||
|
* @param {Object<string, any>} params - The parameters to be converted to a FormData object.
|
||||||
|
* @param {Object<string, any>} options - The options object passed to the Axios constructor.
|
||||||
|
*
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
function AxiosURLSearchParams(params, options) {
|
||||||
|
this._pairs = [];
|
||||||
|
|
||||||
|
params && toFormData(params, this, options);
|
||||||
|
}
|
||||||
|
|
||||||
|
const prototype = AxiosURLSearchParams.prototype;
|
||||||
|
|
||||||
|
prototype.append = function append(name, value) {
|
||||||
|
this._pairs.push([name, value]);
|
||||||
|
};
|
||||||
|
|
||||||
|
prototype.toString = function toString(encoder) {
|
||||||
|
const _encode = encoder ? function(value) {
|
||||||
|
return encoder.call(this, value, encode);
|
||||||
|
} : encode;
|
||||||
|
|
||||||
|
return this._pairs.map(function each(pair) {
|
||||||
|
return _encode(pair[0]) + '=' + _encode(pair[1]);
|
||||||
|
}, '').join('&');
|
||||||
|
};
|
||||||
|
|
||||||
|
export default AxiosURLSearchParams;
|
||||||
77
server/node_modules/axios/lib/helpers/HttpStatusCode.js
generated
vendored
Normal file
77
server/node_modules/axios/lib/helpers/HttpStatusCode.js
generated
vendored
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
const HttpStatusCode = {
|
||||||
|
Continue: 100,
|
||||||
|
SwitchingProtocols: 101,
|
||||||
|
Processing: 102,
|
||||||
|
EarlyHints: 103,
|
||||||
|
Ok: 200,
|
||||||
|
Created: 201,
|
||||||
|
Accepted: 202,
|
||||||
|
NonAuthoritativeInformation: 203,
|
||||||
|
NoContent: 204,
|
||||||
|
ResetContent: 205,
|
||||||
|
PartialContent: 206,
|
||||||
|
MultiStatus: 207,
|
||||||
|
AlreadyReported: 208,
|
||||||
|
ImUsed: 226,
|
||||||
|
MultipleChoices: 300,
|
||||||
|
MovedPermanently: 301,
|
||||||
|
Found: 302,
|
||||||
|
SeeOther: 303,
|
||||||
|
NotModified: 304,
|
||||||
|
UseProxy: 305,
|
||||||
|
Unused: 306,
|
||||||
|
TemporaryRedirect: 307,
|
||||||
|
PermanentRedirect: 308,
|
||||||
|
BadRequest: 400,
|
||||||
|
Unauthorized: 401,
|
||||||
|
PaymentRequired: 402,
|
||||||
|
Forbidden: 403,
|
||||||
|
NotFound: 404,
|
||||||
|
MethodNotAllowed: 405,
|
||||||
|
NotAcceptable: 406,
|
||||||
|
ProxyAuthenticationRequired: 407,
|
||||||
|
RequestTimeout: 408,
|
||||||
|
Conflict: 409,
|
||||||
|
Gone: 410,
|
||||||
|
LengthRequired: 411,
|
||||||
|
PreconditionFailed: 412,
|
||||||
|
PayloadTooLarge: 413,
|
||||||
|
UriTooLong: 414,
|
||||||
|
UnsupportedMediaType: 415,
|
||||||
|
RangeNotSatisfiable: 416,
|
||||||
|
ExpectationFailed: 417,
|
||||||
|
ImATeapot: 418,
|
||||||
|
MisdirectedRequest: 421,
|
||||||
|
UnprocessableEntity: 422,
|
||||||
|
Locked: 423,
|
||||||
|
FailedDependency: 424,
|
||||||
|
TooEarly: 425,
|
||||||
|
UpgradeRequired: 426,
|
||||||
|
PreconditionRequired: 428,
|
||||||
|
TooManyRequests: 429,
|
||||||
|
RequestHeaderFieldsTooLarge: 431,
|
||||||
|
UnavailableForLegalReasons: 451,
|
||||||
|
InternalServerError: 500,
|
||||||
|
NotImplemented: 501,
|
||||||
|
BadGateway: 502,
|
||||||
|
ServiceUnavailable: 503,
|
||||||
|
GatewayTimeout: 504,
|
||||||
|
HttpVersionNotSupported: 505,
|
||||||
|
VariantAlsoNegotiates: 506,
|
||||||
|
InsufficientStorage: 507,
|
||||||
|
LoopDetected: 508,
|
||||||
|
NotExtended: 510,
|
||||||
|
NetworkAuthenticationRequired: 511,
|
||||||
|
WebServerIsDown: 521,
|
||||||
|
ConnectionTimedOut: 522,
|
||||||
|
OriginIsUnreachable: 523,
|
||||||
|
TimeoutOccurred: 524,
|
||||||
|
SslHandshakeFailed: 525,
|
||||||
|
InvalidSslCertificate: 526,
|
||||||
|
};
|
||||||
|
|
||||||
|
Object.entries(HttpStatusCode).forEach(([key, value]) => {
|
||||||
|
HttpStatusCode[value] = key;
|
||||||
|
});
|
||||||
|
|
||||||
|
export default HttpStatusCode;
|
||||||
7
server/node_modules/axios/lib/helpers/README.md
generated
vendored
Normal file
7
server/node_modules/axios/lib/helpers/README.md
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# axios // helpers
|
||||||
|
|
||||||
|
The modules found in `helpers/` should be generic modules that are _not_ specific to the domain logic of axios. These modules could theoretically be published to npm on their own and consumed by other modules or apps. Some examples of generic modules are things like:
|
||||||
|
|
||||||
|
- Browser polyfills
|
||||||
|
- Managing cookies
|
||||||
|
- Parsing HTTP headers
|
||||||
28
server/node_modules/axios/lib/helpers/ZlibHeaderTransformStream.js
generated
vendored
Normal file
28
server/node_modules/axios/lib/helpers/ZlibHeaderTransformStream.js
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
import stream from "stream";
|
||||||
|
|
||||||
|
class ZlibHeaderTransformStream extends stream.Transform {
|
||||||
|
__transform(chunk, encoding, callback) {
|
||||||
|
this.push(chunk);
|
||||||
|
callback();
|
||||||
|
}
|
||||||
|
|
||||||
|
_transform(chunk, encoding, callback) {
|
||||||
|
if (chunk.length !== 0) {
|
||||||
|
this._transform = this.__transform;
|
||||||
|
|
||||||
|
// Add Default Compression headers if no zlib headers are present
|
||||||
|
if (chunk[0] !== 120) { // Hex: 78
|
||||||
|
const header = Buffer.alloc(2);
|
||||||
|
header[0] = 120; // Hex: 78
|
||||||
|
header[1] = 156; // Hex: 9C
|
||||||
|
this.push(header, encoding);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.__transform(chunk, encoding, callback);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default ZlibHeaderTransformStream;
|
||||||
14
server/node_modules/axios/lib/helpers/bind.js
generated
vendored
Normal file
14
server/node_modules/axios/lib/helpers/bind.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a bound version of a function with a specified `this` context
|
||||||
|
*
|
||||||
|
* @param {Function} fn - The function to bind
|
||||||
|
* @param {*} thisArg - The value to be passed as the `this` parameter
|
||||||
|
* @returns {Function} A new function that will call the original function with the specified `this` context
|
||||||
|
*/
|
||||||
|
export default function bind(fn, thisArg) {
|
||||||
|
return function wrap() {
|
||||||
|
return fn.apply(thisArg, arguments);
|
||||||
|
};
|
||||||
|
}
|
||||||
64
server/node_modules/axios/lib/helpers/buildURL.js
generated
vendored
Normal file
64
server/node_modules/axios/lib/helpers/buildURL.js
generated
vendored
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
import utils from '../utils.js';
|
||||||
|
import AxiosURLSearchParams from '../helpers/AxiosURLSearchParams.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* It replaces all instances of the characters `:`, `$`, `,`, `+`, `[`, and `]` with their
|
||||||
|
* URI encoded counterparts
|
||||||
|
*
|
||||||
|
* @param {string} val The value to be encoded.
|
||||||
|
*
|
||||||
|
* @returns {string} The encoded value.
|
||||||
|
*/
|
||||||
|
function encode(val) {
|
||||||
|
return encodeURIComponent(val).
|
||||||
|
replace(/%3A/gi, ':').
|
||||||
|
replace(/%24/g, '$').
|
||||||
|
replace(/%2C/gi, ',').
|
||||||
|
replace(/%20/g, '+');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build a URL by appending params to the end
|
||||||
|
*
|
||||||
|
* @param {string} url The base of the url (e.g., http://www.google.com)
|
||||||
|
* @param {object} [params] The params to be appended
|
||||||
|
* @param {?(object|Function)} options
|
||||||
|
*
|
||||||
|
* @returns {string} The formatted url
|
||||||
|
*/
|
||||||
|
export default function buildURL(url, params, options) {
|
||||||
|
if (!params) {
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
|
||||||
|
const _encode = options && options.encode || encode;
|
||||||
|
|
||||||
|
const _options = utils.isFunction(options) ? {
|
||||||
|
serialize: options
|
||||||
|
} : options;
|
||||||
|
|
||||||
|
const serializeFn = _options && _options.serialize;
|
||||||
|
|
||||||
|
let serializedParams;
|
||||||
|
|
||||||
|
if (serializeFn) {
|
||||||
|
serializedParams = serializeFn(params, _options);
|
||||||
|
} else {
|
||||||
|
serializedParams = utils.isURLSearchParams(params) ?
|
||||||
|
params.toString() :
|
||||||
|
new AxiosURLSearchParams(params, _options).toString(_encode);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (serializedParams) {
|
||||||
|
const hashmarkIndex = url.indexOf("#");
|
||||||
|
|
||||||
|
if (hashmarkIndex !== -1) {
|
||||||
|
url = url.slice(0, hashmarkIndex);
|
||||||
|
}
|
||||||
|
url += (url.indexOf('?') === -1 ? '?' : '&') + serializedParams;
|
||||||
|
}
|
||||||
|
|
||||||
|
return url;
|
||||||
|
}
|
||||||
16
server/node_modules/axios/lib/helpers/callbackify.js
generated
vendored
Normal file
16
server/node_modules/axios/lib/helpers/callbackify.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
import utils from "../utils.js";
|
||||||
|
|
||||||
|
const callbackify = (fn, reducer) => {
|
||||||
|
return utils.isAsyncFn(fn) ? function (...args) {
|
||||||
|
const cb = args.pop();
|
||||||
|
fn.apply(this, args).then((value) => {
|
||||||
|
try {
|
||||||
|
reducer ? cb(null, ...reducer(value)) : cb(null, value);
|
||||||
|
} catch (err) {
|
||||||
|
cb(err);
|
||||||
|
}
|
||||||
|
}, cb);
|
||||||
|
} : fn;
|
||||||
|
}
|
||||||
|
|
||||||
|
export default callbackify;
|
||||||
15
server/node_modules/axios/lib/helpers/combineURLs.js
generated
vendored
Normal file
15
server/node_modules/axios/lib/helpers/combineURLs.js
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new URL by combining the specified URLs
|
||||||
|
*
|
||||||
|
* @param {string} baseURL The base URL
|
||||||
|
* @param {string} relativeURL The relative URL
|
||||||
|
*
|
||||||
|
* @returns {string} The combined URL
|
||||||
|
*/
|
||||||
|
export default function combineURLs(baseURL, relativeURL) {
|
||||||
|
return relativeURL
|
||||||
|
? baseURL.replace(/\/?\/$/, '') + '/' + relativeURL.replace(/^\/+/, '')
|
||||||
|
: baseURL;
|
||||||
|
}
|
||||||
48
server/node_modules/axios/lib/helpers/composeSignals.js
generated
vendored
Normal file
48
server/node_modules/axios/lib/helpers/composeSignals.js
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
import CanceledError from "../cancel/CanceledError.js";
|
||||||
|
import AxiosError from "../core/AxiosError.js";
|
||||||
|
import utils from '../utils.js';
|
||||||
|
|
||||||
|
const composeSignals = (signals, timeout) => {
|
||||||
|
const {length} = (signals = signals ? signals.filter(Boolean) : []);
|
||||||
|
|
||||||
|
if (timeout || length) {
|
||||||
|
let controller = new AbortController();
|
||||||
|
|
||||||
|
let aborted;
|
||||||
|
|
||||||
|
const onabort = function (reason) {
|
||||||
|
if (!aborted) {
|
||||||
|
aborted = true;
|
||||||
|
unsubscribe();
|
||||||
|
const err = reason instanceof Error ? reason : this.reason;
|
||||||
|
controller.abort(err instanceof AxiosError ? err : new CanceledError(err instanceof Error ? err.message : err));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let timer = timeout && setTimeout(() => {
|
||||||
|
timer = null;
|
||||||
|
onabort(new AxiosError(`timeout of ${timeout}ms exceeded`, AxiosError.ETIMEDOUT))
|
||||||
|
}, timeout)
|
||||||
|
|
||||||
|
const unsubscribe = () => {
|
||||||
|
if (signals) {
|
||||||
|
timer && clearTimeout(timer);
|
||||||
|
timer = null;
|
||||||
|
signals.forEach(signal => {
|
||||||
|
signal.unsubscribe ? signal.unsubscribe(onabort) : signal.removeEventListener('abort', onabort);
|
||||||
|
});
|
||||||
|
signals = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
signals.forEach((signal) => signal.addEventListener('abort', onabort));
|
||||||
|
|
||||||
|
const {signal} = controller;
|
||||||
|
|
||||||
|
signal.unsubscribe = () => utils.asap(unsubscribe);
|
||||||
|
|
||||||
|
return signal;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default composeSignals;
|
||||||
53
server/node_modules/axios/lib/helpers/cookies.js
generated
vendored
Normal file
53
server/node_modules/axios/lib/helpers/cookies.js
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
import utils from '../utils.js';
|
||||||
|
import platform from '../platform/index.js';
|
||||||
|
|
||||||
|
export default platform.hasStandardBrowserEnv ?
|
||||||
|
|
||||||
|
// Standard browser envs support document.cookie
|
||||||
|
{
|
||||||
|
write(name, value, expires, path, domain, secure, sameSite) {
|
||||||
|
if (typeof document === 'undefined') return;
|
||||||
|
|
||||||
|
const cookie = [`${name}=${encodeURIComponent(value)}`];
|
||||||
|
|
||||||
|
if (utils.isNumber(expires)) {
|
||||||
|
cookie.push(`expires=${new Date(expires).toUTCString()}`);
|
||||||
|
}
|
||||||
|
if (utils.isString(path)) {
|
||||||
|
cookie.push(`path=${path}`);
|
||||||
|
}
|
||||||
|
if (utils.isString(domain)) {
|
||||||
|
cookie.push(`domain=${domain}`);
|
||||||
|
}
|
||||||
|
if (secure === true) {
|
||||||
|
cookie.push('secure');
|
||||||
|
}
|
||||||
|
if (utils.isString(sameSite)) {
|
||||||
|
cookie.push(`SameSite=${sameSite}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
document.cookie = cookie.join('; ');
|
||||||
|
},
|
||||||
|
|
||||||
|
read(name) {
|
||||||
|
if (typeof document === 'undefined') return null;
|
||||||
|
const match = document.cookie.match(new RegExp('(?:^|; )' + name + '=([^;]*)'));
|
||||||
|
return match ? decodeURIComponent(match[1]) : null;
|
||||||
|
},
|
||||||
|
|
||||||
|
remove(name) {
|
||||||
|
this.write(name, '', Date.now() - 86400000, '/');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
:
|
||||||
|
|
||||||
|
// Non-standard browser env (web workers, react-native) lack needed support.
|
||||||
|
{
|
||||||
|
write() {},
|
||||||
|
read() {
|
||||||
|
return null;
|
||||||
|
},
|
||||||
|
remove() {}
|
||||||
|
};
|
||||||
|
|
||||||
26
server/node_modules/axios/lib/helpers/deprecatedMethod.js
generated
vendored
Normal file
26
server/node_modules/axios/lib/helpers/deprecatedMethod.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
/*eslint no-console:0*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Supply a warning to the developer that a method they are using
|
||||||
|
* has been deprecated.
|
||||||
|
*
|
||||||
|
* @param {string} method The name of the deprecated method
|
||||||
|
* @param {string} [instead] The alternate method to use if applicable
|
||||||
|
* @param {string} [docs] The documentation URL to get further details
|
||||||
|
*
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
export default function deprecatedMethod(method, instead, docs) {
|
||||||
|
try {
|
||||||
|
console.warn(
|
||||||
|
'DEPRECATED method `' + method + '`.' +
|
||||||
|
(instead ? ' Use `' + instead + '` instead.' : '') +
|
||||||
|
' This method will be removed in a future release.');
|
||||||
|
|
||||||
|
if (docs) {
|
||||||
|
console.warn('For more information about usage see ' + docs);
|
||||||
|
}
|
||||||
|
} catch (e) { /* Ignore */ }
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user