Mirror of https://github.com/sussy-code/smov.git (synced 2024-12-20 14:37:43 +01:00)
Merge pull request #561 from movie-web/add-providers-api
Add providers api integration
Commit 025aaffc2b
16 changed files with 654 additions and 162 deletions
@@ -20,7 +20,6 @@
<link href="https://fonts.googleapis.com/css2?family=Open+Sans:wght@400;500;600;700&display=swap" rel="stylesheet" />
<script src="/config.js"></script>
<script src="https://cdn.jsdelivr.net/gh/movie-web/6C6F6C7A@8b821f445b83d51ef1b8f42c99b7346f6b47dce5/out.js"></script>
<!-- prevent darkreader extension from messing with our already dark site -->
<meta name="darkreader-lock" />
@@ -59,4 +58,4 @@
<script type="module" src="/src/index.tsx"></script>
</body>
</html>
</html>
@@ -45,6 +45,7 @@
"i18next": "^22.4.5",
"immer": "^10.0.2",
"iso-639-1": "^3.1.0",
"jwt-decode": "^4.0.0",
"lodash.isequal": "^4.5.0",
"nanoid": "^5.0.4",
"node-forge": "^1.3.1",
@@ -57,6 +58,7 @@
"react-i18next": "^12.1.1",
"react-router-dom": "^5.2.0",
"react-sticky-el": "^2.1.0",
"react-turnstile": "^1.1.2",
"react-use": "^17.4.0",
"slugify": "^1.6.6",
"subsrt-ts": "^2.1.1",
@@ -68,6 +68,9 @@ dependencies:
  iso-639-1:
    specifier: ^3.1.0
    version: 3.1.0
  jwt-decode:
    specifier: ^4.0.0
    version: 4.0.0
  lodash.isequal:
    specifier: ^4.5.0
    version: 4.5.0
@@ -104,6 +107,9 @@ dependencies:
  react-sticky-el:
    specifier: ^2.1.0
    version: 2.1.0(react-dom@17.0.2)(react@17.0.2)
  react-turnstile:
    specifier: ^1.1.2
    version: 1.1.2(react-dom@17.0.2)(react@17.0.2)
  react-use:
    specifier: ^17.4.0
    version: 17.4.0(react-dom@17.0.2)(react@17.0.2)
@@ -4523,6 +4529,11 @@ packages:
    resolution: {integrity: sha512-cxQGGUiit6CGUpuuiezY8N4m1wgF4o7127rXEXDFcxeDUFfdV7gSkwA26Fe2wWBiNQq2SZOgN4gSmMxB/StA8Q==}
    dev: true

  /jwt-decode@4.0.0:
    resolution: {integrity: sha512-+KJGIyHgkGuIq3IEBNftfhW/LfWhXUIY6OmyVWjliu5KH1y0fw7VQ8YndE2O4qZdMSd9SqbnC8GOcZEy0Om7sA==}
    engines: {node: '>=18'}
    dev: false

  /keyv@4.5.3:
    resolution: {integrity: sha512-QCiSav9WaX1PgETJ+SpNnx2PRRapJ/oRSXM4VO5OGYGSjrxbKPVFVhB3l2OCbLCk329N8qyAtsJjSjvVBWzEug==}
    dependencies:
@@ -5321,6 +5332,16 @@ packages:
      react-dom: 17.0.2(react@17.0.2)
    dev: false

  /react-turnstile@1.1.2(react-dom@17.0.2)(react@17.0.2):
    resolution: {integrity: sha512-wfhSf4JtXlmLRkfxMryU8yEeCbh401muKoInhx+TegYwP8RprUW5XPZa8WnCNZiYpMy1i6IXAb1Ar7xj5HxJag==}
    peerDependencies:
      react: '>= 17.0.0'
      react-dom: '>= 17.0.0'
    dependencies:
      react: 17.0.2
      react-dom: 17.0.2(react@17.0.2)
    dev: false

  /react-universal-interface@0.6.2(react@17.0.2)(tslib@2.6.2):
    resolution: {integrity: sha512-dg8yXdcQmvgR13RIlZbTRQOoUrDciFVoSBZILwjE2LFISxZZ8loVJKAkuzswl5js8BHda79bIb2b84ehU8IjXw==}
    peerDependencies:
@@ -1,5 +1,6 @@
import { FetchOptions, FetchResponse, ofetch } from "ofetch";
import { ofetch } from "ofetch";

import { getApiToken, setApiToken } from "@/backend/helpers/providerApi";
import { getLoadbalancedProxyUrl } from "@/utils/providers";

type P<T> = Parameters<typeof ofetch<T, any>>;
@@ -21,7 +22,11 @@ export function mwFetch<T>(url: string, ops: P<T>[1] = {}): R<T> {
return baseFetch<T>(url, ops);
}

export function proxiedFetch<T>(url: string, ops: P<T>[1] = {}): R<T> {
export async function singularProxiedFetch<T>(
proxyUrl: string,
url: string,
ops: P<T>[1] = {}
): R<T> {
let combinedUrl = ops?.baseURL ?? "";
if (
combinedUrl.length > 0 &&
@@ -45,45 +50,30 @@ export function proxiedFetch<T>(url: string, ops: P<T>[1] = {}): R<T> {
parsedUrl.searchParams.set(k, v);
});

return baseFetch<T>(getLoadbalancedProxyUrl(), {
let headers = ops.headers ?? {};
const apiToken = await getApiToken();
if (apiToken)
headers = {
...headers,
"X-Token": apiToken,
};

return baseFetch<T>(proxyUrl, {
...ops,
baseURL: undefined,
params: {
destination: parsedUrl.toString(),
},
query: {},
});
}

export function rawProxiedFetch<T>(
url: string,
ops: FetchOptions = {}
): Promise<FetchResponse<T>> {
let combinedUrl = ops?.baseURL ?? "";
if (
combinedUrl.length > 0 &&
combinedUrl.endsWith("/") &&
url.startsWith("/")
)
combinedUrl += url.slice(1);
else if (
combinedUrl.length > 0 &&
!combinedUrl.endsWith("/") &&
!url.startsWith("/")
)
combinedUrl += `/${url}`;
else combinedUrl += url;

const parsedUrl = new URL(combinedUrl);
Object.entries(ops?.params ?? {}).forEach(([k, v]) => {
parsedUrl.searchParams.set(k, v);
});

return baseFetch.raw(getLoadbalancedProxyUrl(), {
...ops,
baseURL: undefined,
params: {
destination: parsedUrl.toString(),
headers,
onResponse(context) {
const tokenHeader = context.response.headers.get("X-Token");
if (tokenHeader) setApiToken(tokenHeader);
ops.onResponse?.(context);
},
});
}

export function proxiedFetch<T>(url: string, ops: P<T>[1] = {}): R<T> {
return singularProxiedFetch<T>(getLoadbalancedProxyUrl(), url, ops);
}
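A minimal sketch of the reworked fetch helpers above: singularProxiedFetch targets one specific proxy worker and passes the real target through a destination query parameter, while proxiedFetch keeps the old call shape and picks a worker via the load balancer. The proxy hostname below is a placeholder, not a real endpoint; the echo URL is the same one used by the worker test page later in this diff.

// Sketch only, assuming a hypothetical proxy worker at proxy.example.com.
import { proxiedFetch, singularProxiedFetch } from "@/backend/helpers/fetch";

async function fetchDemo() {
  // Explicit proxy: the worker receives the target as ?destination=<encoded URL>.
  const viaOneWorker = await singularProxiedFetch<unknown>(
    "https://proxy.example.com",
    "https://postman-echo.com/get",
    {}
  );

  // Load-balanced proxy: same call shape as before this change.
  const viaAnyWorker = await proxiedFetch<unknown>("https://postman-echo.com/get");

  console.log(viaOneWorker, viaAnyWorker);
}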
src/backend/helpers/providerApi.ts (new file, 158 lines)

@@ -0,0 +1,158 @@
import { MetaOutput, NotFoundError, ScrapeMedia } from "@movie-web/providers";
import { jwtDecode } from "jwt-decode";

import { mwFetch } from "@/backend/helpers/fetch";
import { getTurnstileToken, isTurnstileInitialized } from "@/stores/turnstile";

let metaDataCache: MetaOutput[] | null = null;
let token: null | string = null;

export function setCachedMetadata(data: MetaOutput[]) {
  metaDataCache = data;
}

export function getCachedMetadata(): MetaOutput[] {
  return metaDataCache ?? [];
}

export function setApiToken(newToken: string) {
  token = newToken;
}

function getTokenIfValid(): null | string {
  if (!token) return null;
  try {
    const body = jwtDecode(token);
    if (!body.exp) return `jwt|${token}`;
    if (Date.now() / 1000 < body.exp) return `jwt|${token}`;
  } catch (err) {
    // we don't care about parse errors
  }
  return null;
}

export async function fetchMetadata(base: string) {
  if (metaDataCache) return;
  const data = await mwFetch<MetaOutput[][]>(`${base}/metadata`);
  metaDataCache = data.flat();
}

function scrapeMediaToQueryMedia(media: ScrapeMedia) {
  let extra: Record<string, string> = {};
  if (media.type === "show") {
    extra = {
      episodeNumber: media.episode.number.toString(),
      episodeTmdbId: media.episode.tmdbId,
      seasonNumber: media.season.number.toString(),
      seasonTmdbId: media.season.tmdbId,
    };
  }

  return {
    type: media.type,
    releaseYear: media.releaseYear.toString(),
    imdbId: media.imdbId,
    tmdbId: media.tmdbId,
    title: media.title,
    ...extra,
  };
}

function addQueryDataToUrl(url: URL, data: Record<string, string | undefined>) {
  Object.entries(data).forEach((entry) => {
    if (entry[1]) url.searchParams.set(entry[0], entry[1]);
  });
}

export function makeProviderUrl(base: string) {
  const makeUrl = (p: string) => new URL(`${base}${p}`);
  return {
    scrapeSource(sourceId: string, media: ScrapeMedia) {
      const url = makeUrl("/scrape/source");
      addQueryDataToUrl(url, scrapeMediaToQueryMedia(media));
      addQueryDataToUrl(url, { id: sourceId });
      return url.toString();
    },
    scrapeAll(media: ScrapeMedia) {
      const url = makeUrl("/scrape");
      addQueryDataToUrl(url, scrapeMediaToQueryMedia(media));
      return url.toString();
    },
    scrapeEmbed(embedId: string, embedUrl: string) {
      const url = makeUrl("/scrape/embed");
      addQueryDataToUrl(url, { id: embedId, url: embedUrl });
      return url.toString();
    },
  };
}

export async function getApiToken(): Promise<string | null> {
  let apiToken = getTokenIfValid();
  if (!apiToken && isTurnstileInitialized()) {
    apiToken = `turnstile|${await getTurnstileToken()}`;
  }
  return apiToken;
}

export async function connectServerSideEvents<T>(
  url: string,
  endEvents: string[]
) {
  const apiToken = await getApiToken();

  // insert token, if it's set
  const parsedUrl = new URL(url);
  if (apiToken) parsedUrl.searchParams.set("token", apiToken);
  const eventSource = new EventSource(parsedUrl.toString());

  let promReject: (reason?: any) => void;
  let promResolve: (value: T) => void;
  const promise = new Promise<T>((resolve, reject) => {
    promResolve = resolve;
    promReject = reject;
  });

  endEvents.forEach((evt) => {
    eventSource.addEventListener(evt, (e) => {
      eventSource.close();
      promResolve(JSON.parse(e.data));
    });
  });

  eventSource.addEventListener("token", (e) => {
    setApiToken(JSON.parse(e.data));
  });

  eventSource.addEventListener("error", (err: MessageEvent<any>) => {
    eventSource.close();
    if (err.data) {
      const data = JSON.parse(err.data);
      let errObj = new Error("scrape error");
      if (data.name === NotFoundError.name)
        errObj = new NotFoundError("Notfound from server");
      Object.assign(errObj, data);
      promReject(errObj);
      return;
    }

    console.error("Failed to connect to SSE", err);
    promReject(err);
  });

  eventSource.addEventListener("message", (ev) => {
    if (!ev) {
      eventSource.close();
      return;
    }
    setTimeout(() => {
      promReject(new Error("SSE closed improperly"));
    }, 1000);
  });

  return {
    promise: () => promise,
    on<Data>(event: string, cb: (data: Data) => void) {
      eventSource.addEventListener(event, (e) => cb(JSON.parse(e.data)));
    },
  };
}
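To show how the pieces of the new helper module compose, here is a hedged sketch that mirrors what the scraping hooks later in this diff do: build a scrape URL with makeProviderUrl, open the server-sent-events stream, listen for progress, and await the end event. The base URL is a placeholder.

// Sketch only; "https://providers-api.example.com" stands in for a configured provider API host.
import { RunOutput, ScrapeMedia } from "@movie-web/providers";

import {
  connectServerSideEvents,
  makeProviderUrl,
} from "@/backend/helpers/providerApi";

async function scrapeThroughApi(media: ScrapeMedia) {
  const urls = makeProviderUrl("https://providers-api.example.com");

  // Opens an EventSource and settles once one of the end events fires.
  const conn = await connectServerSideEvents<RunOutput | "">(
    urls.scrapeAll(media),
    ["completed", "noOutput"]
  );

  // Progress events can be observed while the scrape runs.
  conn.on("update", (evt) => console.log("progress", evt));

  const output = await conn.promise();
  return output === "" ? null : output; // "noOutput" arrives as an empty payload
}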
@@ -1,6 +1,7 @@
import { useMemo } from "react";
import { useTranslation } from "react-i18next";

import { getCachedMetadata } from "@/backend/helpers/providerApi";
import { Toggle } from "@/components/buttons/Toggle";
import { Icon, Icons } from "@/components/Icon";
import { useCaptions } from "@/components/player/hooks/useCaptions";
@@ -10,7 +11,6 @@ import { useOverlayRouter } from "@/hooks/useOverlayRouter";
import { usePlayerStore } from "@/stores/player/store";
import { qualityToString } from "@/stores/player/utils/qualities";
import { useSubtitleStore } from "@/stores/subtitles";
import { providers } from "@/utils/providers";

export function SettingsMenu({ id }: { id: string }) {
const { t } = useTranslation();
@@ -23,7 +23,10 @@ export function SettingsMenu({ id }: { id: string }) {
const currentSourceId = usePlayerStore((s) => s.sourceId);
const sourceName = useMemo(() => {
if (!currentSourceId) return "...";
return providers.getMetadata(currentSourceId)?.name ?? "...";
const source = getCachedMetadata().find(
(src) => src.id === currentSourceId
);
return source?.name ?? "...";
}, [currentSourceId]);
const { toggleLastUsed } = useCaptions();
@@ -1,6 +1,7 @@
import { ReactNode, useEffect, useMemo, useRef } from "react";
import { useTranslation } from "react-i18next";

import { getCachedMetadata } from "@/backend/helpers/providerApi";
import { Loading } from "@/components/layout/Loading";
import {
useEmbedScraping,
@@ -10,7 +11,6 @@ import { Menu } from "@/components/player/internals/ContextMenu";
import { SelectableLink } from "@/components/player/internals/ContextMenu/Links";
import { useOverlayRouter } from "@/hooks/useOverlayRouter";
import { usePlayerStore } from "@/stores/player/store";
import { providers } from "@/utils/providers";

export interface SourceSelectionViewProps {
id: string;
@@ -33,7 +33,7 @@ export function EmbedOption(props: {

const embedName = useMemo(() => {
if (!props.embedId) return unknownEmbedName;
const sourceMeta = providers.getMetadata(props.embedId);
const sourceMeta = getCachedMetadata().find((s) => s.id === props.embedId);
return sourceMeta?.name ?? unknownEmbedName;
}, [props.embedId, unknownEmbedName]);

@@ -61,7 +61,7 @@ export function EmbedSelectionView({ sourceId, id }: EmbedSelectionViewProps) {

const sourceName = useMemo(() => {
if (!sourceId) return "...";
const sourceMeta = providers.getMetadata(sourceId);
const sourceMeta = getCachedMetadata().find((s) => s.id === sourceId);
return sourceMeta?.name ?? "...";
}, [sourceId]);

@@ -137,8 +137,8 @@ export function SourceSelectionView({
const currentSourceId = usePlayerStore((s) => s.sourceId);
const sources = useMemo(() => {
if (!metaType) return [];
return providers
.listSources()
return getCachedMetadata()
.filter((v) => v.type === "source")
.filter((v) => v.mediaTypes?.includes(metaType));
}, [metaType]);
@@ -5,6 +5,10 @@ import {
} from "@movie-web/providers";
import { useAsyncFn } from "react-use";

import {
connectServerSideEvents,
makeProviderUrl,
} from "@/backend/helpers/providerApi";
import {
scrapeSourceOutputToProviderMetric,
useReportProviders,
@@ -14,7 +18,7 @@ import { convertRunoutputToSource } from "@/components/player/utils/convertRunou
import { useOverlayRouter } from "@/hooks/useOverlayRouter";
import { metaToScrapeMedia } from "@/stores/player/slices/source";
import { usePlayerStore } from "@/stores/player/store";
import { providers } from "@/utils/providers";
import { getLoadbalancedProviderApiUrl, providers } from "@/utils/providers";

export function useEmbedScraping(
routerId: string,
@@ -31,13 +35,23 @@ export function useEmbedScraping(
const { report } = useReportProviders();

const [request, run] = useAsyncFn(async () => {
const providerApiUrl = getLoadbalancedProviderApiUrl();
let result: EmbedOutput | undefined;
if (!meta) return;
try {
result = await providers.runEmbedScraper({
id: embedId,
url,
});
if (providerApiUrl) {
const baseUrlMaker = makeProviderUrl(providerApiUrl);
const conn = await connectServerSideEvents<EmbedOutput>(
baseUrlMaker.scrapeEmbed(embedId, url),
["completed", "noOutput"]
);
result = await conn.promise();
} else {
result = await providers.runEmbedScraper({
id: embedId,
url,
});
}
} catch (err) {
console.error(`Failed to scrape ${embedId}`, err);
const notFound = err instanceof NotFoundError;
@@ -85,13 +99,23 @@ export function useSourceScraping(sourceId: string | null, routerId: string) {
const [request, run] = useAsyncFn(async () => {
if (!sourceId || !meta) return null;
const scrapeMedia = metaToScrapeMedia(meta);
const providerApiUrl = getLoadbalancedProviderApiUrl();

let result: SourcererOutput | undefined;
try {
result = await providers.runSourceScraper({
id: sourceId,
media: scrapeMedia,
});
if (providerApiUrl) {
const baseUrlMaker = makeProviderUrl(providerApiUrl);
const conn = await connectServerSideEvents<SourcererOutput>(
baseUrlMaker.scrapeSource(sourceId, scrapeMedia),
["completed", "noOutput"]
);
result = await conn.promise();
} else {
result = await providers.runSourceScraper({
id: sourceId,
media: scrapeMedia,
});
}
} catch (err) {
console.error(`Failed to scrape ${sourceId}`, err);
const notFound = err instanceof NotFoundError;
@@ -120,10 +144,22 @@ export function useSourceScraping(sourceId: string | null, routerId: string) {
let embedResult: EmbedOutput | undefined;
if (!meta) return;
try {
embedResult = await providers.runEmbedScraper({
id: result.embeds[0].embedId,
url: result.embeds[0].url,
});
if (providerApiUrl) {
const baseUrlMaker = makeProviderUrl(providerApiUrl);
const conn = await connectServerSideEvents<EmbedOutput>(
baseUrlMaker.scrapeEmbed(
result.embeds[0].embedId,
result.embeds[0].url
),
["completed", "noOutput"]
);
embedResult = await conn.promise();
} else {
embedResult = await providers.runEmbedScraper({
id: result.embeds[0].embedId,
url: result.embeds[0].url,
});
}
} catch (err) {
console.error(`Failed to scrape ${result.embeds[0].embedId}`, err);
const notFound = err instanceof NotFoundError;
@@ -1,7 +1,16 @@
import { ScrapeMedia } from "@movie-web/providers";
import {
FullScraperEvents,
RunOutput,
ScrapeMedia,
} from "@movie-web/providers";
import { RefObject, useCallback, useEffect, useRef, useState } from "react";

import { providers } from "@/utils/providers";
import {
connectServerSideEvents,
getCachedMetadata,
makeProviderUrl,
} from "@/backend/helpers/providerApi";
import { getLoadbalancedProviderApiUrl, providers } from "@/utils/providers";

export interface ScrapingItems {
id: string;
@@ -18,96 +27,169 @@ export interface ScrapingSegment {
percentage: number;
}

export function useScrape() {
type ScraperEvent<Event extends keyof FullScraperEvents> = Parameters<
NonNullable<FullScraperEvents[Event]>
>[0];

function useBaseScrape() {
const [sources, setSources] = useState<Record<string, ScrapingSegment>>({});
const [sourceOrder, setSourceOrder] = useState<ScrapingItems[]>([]);
const [currentSource, setCurrentSource] = useState<string>();
const lastId = useRef<string | null>(null);

const initEvent = useCallback((evt: ScraperEvent<"init">) => {
setSources(
evt.sourceIds
.map((v) => {
const source = getCachedMetadata().find((s) => s.id === v);
if (!source) throw new Error("invalid source id");
const out: ScrapingSegment = {
name: source.name,
id: source.id,
status: "waiting",
percentage: 0,
};
return out;
})
.reduce<Record<string, ScrapingSegment>>((a, v) => {
a[v.id] = v;
return a;
}, {})
);
setSourceOrder(evt.sourceIds.map((v) => ({ id: v, children: [] })));
}, []);

const startEvent = useCallback((id: ScraperEvent<"start">) => {
setSources((s) => {
if (s[id]) s[id].status = "pending";
return { ...s };
});
setCurrentSource(id);
lastId.current = id;
}, []);

const updateEvent = useCallback((evt: ScraperEvent<"update">) => {
setSources((s) => {
if (s[evt.id]) {
s[evt.id].status = evt.status;
s[evt.id].reason = evt.reason;
s[evt.id].error = evt.error;
s[evt.id].percentage = evt.percentage;
}
return { ...s };
});
}, []);

const discoverEmbedsEvent = useCallback(
(evt: ScraperEvent<"discoverEmbeds">) => {
setSources((s) => {
evt.embeds.forEach((v) => {
const source = getCachedMetadata().find(
(src) => src.id === v.embedScraperId
);
if (!source) throw new Error("invalid source id");
const out: ScrapingSegment = {
embedId: v.embedScraperId,
name: source.name,
id: v.id,
status: "waiting",
percentage: 0,
};
s[v.id] = out;
});
return { ...s };
});
setSourceOrder((s) => {
const source = s.find((v) => v.id === evt.sourceId);
if (!source) throw new Error("invalid source id");
source.children = evt.embeds.map((v) => v.id);
return [...s];
});
},
[]
);

const startScrape = useCallback(() => {
lastId.current = null;
}, []);

const getResult = useCallback((output: RunOutput | null) => {
if (output && lastId.current) {
setSources((s) => {
if (!lastId.current) return s;
if (s[lastId.current]) s[lastId.current].status = "success";
return { ...s };
});
}
return output;
}, []);

return {
initEvent,
startEvent,
updateEvent,
discoverEmbedsEvent,
startScrape,
getResult,
sources,
sourceOrder,
currentSource,
};
}

export function useScrape() {
const {
sources,
sourceOrder,
currentSource,
updateEvent,
discoverEmbedsEvent,
initEvent,
getResult,
startEvent,
startScrape,
} = useBaseScrape();

const startScraping = useCallback(
async (media: ScrapeMedia) => {
if (!providers) return null;
const providerApiUrl = getLoadbalancedProviderApiUrl();
if (providerApiUrl) {
startScrape();
const baseUrlMaker = makeProviderUrl(providerApiUrl);
const conn = await connectServerSideEvents<RunOutput | "">(
baseUrlMaker.scrapeAll(media),
["completed", "noOutput"]
);
conn.on("init", initEvent);
conn.on("start", startEvent);
conn.on("update", updateEvent);
conn.on("discoverEmbeds", discoverEmbedsEvent);
const sseOutput = await conn.promise();

let lastId: string | null = null;
return getResult(sseOutput === "" ? null : sseOutput);
}

if (!providers) return null;
startScrape();
const output = await providers.runAll({
media,
events: {
init(evt) {
setSources(
evt.sourceIds
.map((v) => {
const source = providers.getMetadata(v);
if (!source) throw new Error("invalid source id");
const out: ScrapingSegment = {
name: source.name,
id: source.id,
status: "waiting",
percentage: 0,
};
return out;
})
.reduce<Record<string, ScrapingSegment>>((a, v) => {
a[v.id] = v;
return a;
}, {})
);
setSourceOrder(evt.sourceIds.map((v) => ({ id: v, children: [] })));
},
start(id) {
setSources((s) => {
if (s[id]) s[id].status = "pending";
return { ...s };
});
setCurrentSource(id);
lastId = id;
},
update(evt) {
setSources((s) => {
if (s[evt.id]) {
s[evt.id].status = evt.status;
s[evt.id].reason = evt.reason;
s[evt.id].error = evt.error;
s[evt.id].percentage = evt.percentage;
}
return { ...s };
});
},
discoverEmbeds(evt) {
setSources((s) => {
evt.embeds.forEach((v) => {
const source = providers.getMetadata(v.embedScraperId);
if (!source) throw new Error("invalid source id");
const out: ScrapingSegment = {
embedId: v.embedScraperId,
name: source.name,
id: v.id,
status: "waiting",
percentage: 0,
};
s[v.id] = out;
});
return { ...s };
});
setSourceOrder((s) => {
const source = s.find((v) => v.id === evt.sourceId);
if (!source) throw new Error("invalid source id");
source.children = evt.embeds.map((v) => v.id);
return [...s];
});
},
init: initEvent,
start: startEvent,
update: updateEvent,
discoverEmbeds: discoverEmbedsEvent,
},
});

if (output && lastId) {
setSources((s) => {
if (!lastId) return s;
if (s[lastId]) s[lastId].status = "success";
return { ...s };
});
}

return output;
return getResult(output);
},
[setSourceOrder, setSources]
[
initEvent,
startEvent,
updateEvent,
discoverEmbedsEvent,
getResult,
startScrape,
]
);

return {
@@ -10,6 +10,7 @@ import ReactDOM from "react-dom";
import { HelmetProvider } from "react-helmet-async";
import { useTranslation } from "react-i18next";
import { BrowserRouter, HashRouter } from "react-router-dom";
import Turnstile from "react-turnstile";
import { useAsync } from "react-use";

import { Button } from "@/components/buttons/Button";
@@ -30,16 +31,12 @@ import { useLanguageStore } from "@/stores/language";
import { ProgressSyncer } from "@/stores/progress/ProgressSyncer";
import { SettingsSyncer } from "@/stores/subtitles/SettingsSyncer";
import { ThemeProvider } from "@/stores/theme";
import { TurnstileProvider } from "@/stores/turnstile";

import { initializeChromecast } from "./setup/chromecast";
import { initializeOldStores } from "./stores/__old/migrations";

// initialize
const key =
(window as any)?.__CONFIG__?.VITE_KEY ?? import.meta.env.VITE_KEY ?? null;
if (key) {
(window as any).initMW(conf().PROXY_URLS, key);
}
initializeChromecast();

function LoadingScreen(props: { type: "user" | "lazy" }) {
@@ -148,6 +145,7 @@ function TheRouter(props: { children: ReactNode }) {
ReactDOM.render(
<React.StrictMode>
<ErrorBoundary>
<TurnstileProvider />
<HelmetProvider>
<Suspense fallback={<LoadingScreen type="lazy" />}>
<ThemeProvider applyGlobal>
@@ -2,7 +2,7 @@ import classNames from "classnames";
import { useMemo, useState } from "react";
import { useAsyncFn } from "react-use";

import { mwFetch } from "@/backend/helpers/fetch";
import { singularProxiedFetch } from "@/backend/helpers/fetch";
import { Button } from "@/components/buttons/Button";
import { Icon, Icons } from "@/components/Icon";
import { Box } from "@/components/layout/Box";
@@ -69,11 +69,11 @@ export function WorkerTestPart() {
});
continue;
}
await mwFetch(worker.url, {
query: {
destination: "https://postman-echo.com/get",
},
});
await singularProxiedFetch(
worker.url,
"https://postman-echo.com/get",
{}
);
updateWorker(worker.id, {
id: worker.id,
status: "success",
@@ -94,7 +94,7 @@ export function WorkerTestPart() {
<p className="mb-8 mt-2">{workerList.length} worker(s) registered</p>
<Box>
{workerList.map((v, i) => {
const s = workerState.find((segment) => segment.id);
const s = workerState.find((segment) => segment.id === v.id);
const name = `Worker ${i + 1}`;
if (!s) return <WorkerItem name={name} key={v.id} />;
if (s.status === "error")
@@ -3,6 +3,10 @@ import { useHistory, useParams } from "react-router-dom";
import { useAsync } from "react-use";
import type { AsyncReturnType } from "type-fest";

import {
fetchMetadata,
setCachedMetadata,
} from "@/backend/helpers/providerApi";
import { DetailedMeta, getMetaFromId } from "@/backend/metadata/getmeta";
import { decodeTMDBId } from "@/backend/metadata/tmdb";
import { MWMediaType } from "@/backend/metadata/types/mw";
@@ -14,6 +18,7 @@ import { Paragraph } from "@/components/text/Paragraph";
import { Title } from "@/components/text/Title";
import { ErrorContainer, ErrorLayout } from "@/pages/layouts/ErrorLayout";
import { conf } from "@/setup/config";
import { getLoadbalancedProviderApiUrl, providers } from "@/utils/providers";

export interface MetaPartProps {
onGetMeta?: (meta: DetailedMeta, episodeId?: string) => void;
@@ -36,6 +41,16 @@ export function MetaPart(props: MetaPartProps) {
const history = useHistory();

const { error, value, loading } = useAsync(async () => {
const providerApiUrl = getLoadbalancedProviderApiUrl();
if (providerApiUrl) {
await fetchMetadata(providerApiUrl);
} else {
setCachedMetadata([
...providers.listSources(),
...providers.listEmbeds(),
]);
}

let data: ReturnType<typeof decodeTMDBId> = null;
try {
data = decodeTMDBId(params.media);
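A hedged sketch of the metadata bootstrap introduced above: before playback the cache is filled either from the provider API's /metadata endpoint or from the in-browser provider list, and the player menus then resolve display names from that cache. The "some-source-id" lookup at the end is a placeholder for whatever source id a component holds.

// Sketch only; mirrors the MetaPart change above and the menu lookups earlier in this diff.
import {
  fetchMetadata,
  getCachedMetadata,
  setCachedMetadata,
} from "@/backend/helpers/providerApi";
import { getLoadbalancedProviderApiUrl, providers } from "@/utils/providers";

async function ensureProviderMetadata() {
  const providerApiUrl = getLoadbalancedProviderApiUrl();
  if (providerApiUrl) {
    await fetchMetadata(providerApiUrl); // populates the cache once per session
  } else {
    setCachedMetadata([...providers.listSources(), ...providers.listEmbeds()]);
  }

  // Components such as the settings menu resolve names like this.
  return getCachedMetadata().find((meta) => meta.id === "some-source-id")?.name;
}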
@@ -17,6 +17,7 @@ interface Config {
NORMAL_ROUTER: boolean;
BACKEND_URL: string;
DISALLOWED_IDS: string;
TURNSTILE_KEY: string;
}

export interface RuntimeConfig {
@@ -30,6 +31,7 @@ export interface RuntimeConfig {
PROXY_URLS: string[];
BACKEND_URL: string;
DISALLOWED_IDS: string[];
TURNSTILE_KEY: string | null;
}

const env: Record<keyof Config, undefined | string> = {
@@ -43,6 +45,7 @@ const env: Record<keyof Config, undefined | string> = {
NORMAL_ROUTER: import.meta.env.VITE_NORMAL_ROUTER,
BACKEND_URL: import.meta.env.VITE_BACKEND_URL,
DISALLOWED_IDS: import.meta.env.VITE_DISALLOWED_IDS,
TURNSTILE_KEY: import.meta.env.VITE_TURNSTILE_KEY,
};

// loads from different locations, in order: environment (VITE_{KEY}), window (public/config.js)
@@ -63,6 +66,7 @@ function getKey(key: keyof Config, defaultString?: string): string {

export function conf(): RuntimeConfig {
const dmcaEmail = getKey("DMCA_EMAIL");
const turnstileKey = getKey("TURNSTILE_KEY");
return {
APP_VERSION,
GITHUB_LINK,
@@ -75,6 +79,7 @@ export function conf(): RuntimeConfig {
.split(",")
.map((v) => v.trim()),
NORMAL_ROUTER: getKey("NORMAL_ROUTER", "false") === "true",
TURNSTILE_KEY: turnstileKey.length > 0 ? turnstileKey : null,
DISALLOWED_IDS: getKey("DISALLOWED_IDS", "")
.split(",")
.map((v) => v.trim())
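Per the comment in config.ts, the new TURNSTILE_KEY can be supplied either as a VITE_TURNSTILE_KEY environment variable at build time or through the runtime config.js that index.html loads into window.__CONFIG__. A hypothetical config.js entry is sketched below; the site key value is a placeholder, other keys are omitted, and leaving the key empty keeps TURNSTILE_KEY null so the Turnstile widget is never rendered.

// Hypothetical public/config.js fragment (plain JS loaded before the app bundle).
window.__CONFIG__ = {
  // ...other VITE_* keys omitted for brevity...
  VITE_TURNSTILE_KEY: "<your-turnstile-site-key>", // empty string disables the widget
};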
src/stores/turnstile/index.tsx (new file, 81 lines)

@@ -0,0 +1,81 @@
import Turnstile, { BoundTurnstileObject } from "react-turnstile";
import { create } from "zustand";
import { immer } from "zustand/middleware/immer";

import { conf } from "@/setup/config";

export interface TurnstileStore {
  turnstile: BoundTurnstileObject | null;
  cbs: ((token: string | null) => void)[];
  setTurnstile(v: BoundTurnstileObject | null): void;
  getToken(): Promise<string>;
  processToken(token: string | null): void;
}

export const useTurnstileStore = create(
  immer<TurnstileStore>((set, get) => ({
    turnstile: null,
    cbs: [],
    processToken(token) {
      const cbs = get().cbs;
      cbs.forEach((fn) => fn(token));
      set((s) => {
        s.cbs = [];
      });
    },
    getToken() {
      return new Promise((resolve, reject) => {
        set((s) => {
          s.cbs = [
            ...s.cbs,
            (token) => {
              if (!token) reject(new Error("Failed to get token"));
              else resolve(token);
            },
          ];
        });
      });
    },
    setTurnstile(v) {
      set((s) => {
        s.turnstile = v;
      });
    },
  }))
);

export function getTurnstile() {
  return useTurnstileStore.getState().turnstile;
}

export function isTurnstileInitialized() {
  return !!getTurnstile();
}

export function getTurnstileToken() {
  const turnstile = getTurnstile();
  turnstile?.reset();
  turnstile?.execute();
  return useTurnstileStore.getState().getToken();
}

export function TurnstileProvider() {
  const siteKey = conf().TURNSTILE_KEY;
  const setTurnstile = useTurnstileStore((s) => s.setTurnstile);
  const processToken = useTurnstileStore((s) => s.processToken);
  if (!siteKey) return null;
  return (
    <Turnstile
      sitekey={siteKey}
      onLoad={(_widgetId, bound) => {
        setTurnstile(bound);
      }}
      onError={() => {
        processToken(null);
      }}
      onVerify={(token) => {
        processToken(token);
      }}
    />
  );
}
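A hedged sketch of how the rest of the app requests a token from this store, assuming the TurnstileProvider above is mounted (it is added to src/index.tsx earlier in this diff). The turnstile prefix matches what getApiToken in providerApi.ts produces.

// Sketch only.
import { getTurnstileToken, isTurnstileInitialized } from "@/stores/turnstile";

async function exampleTokenRequest() {
  if (!isTurnstileInitialized()) return null; // no site key configured, widget not rendered
  // reset() + execute() trigger a fresh challenge; the promise resolves when onVerify
  // fires and rejects when onError reports a null token.
  const token = await getTurnstileToken();
  return `turnstile|${token}`; // prefix used by getApiToken() in providerApi.ts
}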
@@ -7,27 +7,52 @@
targets,
} from "@movie-web/providers";

import { conf } from "@/setup/config";
import { useAuthStore } from "@/stores/auth";
import { getApiToken, setApiToken } from "@/backend/helpers/providerApi";
import { getProviderApiUrls, getProxyUrls } from "@/utils/proxyUrls";

const originalUrls = conf().PROXY_URLS;
let fetchersIndex = -1;
function makeLoadbalancedList(getter: () => string[]) {
let listIndex = -1;
return () => {
const fetchers = getter();
if (listIndex === -1 || listIndex >= fetchers.length) {
listIndex = Math.floor(Math.random() * fetchers.length);
}
const proxyUrl = fetchers[listIndex];
listIndex = (listIndex + 1) % fetchers.length;
return proxyUrl;
};
}

export function getLoadbalancedProxyUrl() {
const fetchers = useAuthStore.getState().proxySet ?? originalUrls;
if (fetchersIndex === -1 || fetchersIndex >= fetchers.length) {
fetchersIndex = Math.floor(Math.random() * fetchers.length);
}
const proxyUrl = fetchers[fetchersIndex];
fetchersIndex = (fetchersIndex + 1) % fetchers.length;
return proxyUrl;
export const getLoadbalancedProxyUrl = makeLoadbalancedList(getProxyUrls);
export const getLoadbalancedProviderApiUrl =
makeLoadbalancedList(getProviderApiUrls);

async function fetchButWithApiTokens(
input: RequestInfo | URL,
init?: RequestInit | undefined
): Promise<Response> {
const apiToken = await getApiToken();
const headers = new Headers(init?.headers);
if (apiToken) headers.set("X-Token", apiToken);
const response = await fetch(
input,
init
? {
...init,
headers,
}
: undefined
);
const newApiToken = response.headers.get("X-Token");
if (newApiToken) setApiToken(newApiToken);
return response;
}

function makeLoadBalancedSimpleProxyFetcher() {
const fetcher: ProviderBuilderOptions["fetcher"] = (a, b) => {
const fetcher: ProviderBuilderOptions["fetcher"] = async (a, b) => {
const currentFetcher = makeSimpleProxyFetcher(
getLoadbalancedProxyUrl(),
fetch
fetchButWithApiTokens
);
return currentFetcher(a, b);
};
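The refactor above yields two independent round-robin rotations, one over proxy workers and one over provider API hosts, each starting at a random index and keeping its own counter. A short usage sketch:

// Sketch only.
import {
  getLoadbalancedProviderApiUrl,
  getLoadbalancedProxyUrl,
} from "@/utils/providers";

const proxy = getLoadbalancedProxyUrl(); // next proxy worker in the rotation
const api = getLoadbalancedProviderApiUrl(); // next provider API host; comes back
// undefined at runtime when no |type=api| entry is configured, which is why the
// scraping hooks above guard with `if (providerApiUrl)`.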
src/utils/proxyUrls.ts (new file, 77 lines)

@@ -0,0 +1,77 @@
import { conf } from "@/setup/config";
import { useAuthStore } from "@/stores/auth";

const originalUrls = conf().PROXY_URLS;
const types = ["proxy", "api"] as const;

type ParsedUrlType = (typeof types)[number];

export interface ParsedUrl {
  url: string;
  type: ParsedUrlType;
}

function canParseUrl(url: string): boolean {
  try {
    return !!new URL(url);
  } catch {
    return false;
  }
}

function isParsedUrlType(type: string): type is ParsedUrlType {
  return types.includes(type as any);
}

/**
 * Turn a string like "a=b;c=d;d=e" into a dictionary object
 */
function parseParams(input: string): Record<string, string> {
  const entriesParams = input
    .split(";")
    .map((param) => param.split("=", 2).filter((part) => part.length !== 0))
    .filter((v) => v.length === 2);
  return Object.fromEntries(entriesParams);
}

export function getParsedUrls() {
  const urls = useAuthStore.getState().proxySet ?? originalUrls;
  const output: ParsedUrl[] = [];
  urls.forEach((url) => {
    if (!url.startsWith("|")) {
      if (canParseUrl(url)) {
        output.push({
          url,
          type: "proxy",
        });
        return;
      }
    }

    const match = /^\|([^|]+)\|(.*)$/g.exec(url);
    if (!match || !match[2]) return;
    if (!canParseUrl(match[2])) return;
    const params = parseParams(match[1]);
    const type = params.type ?? "proxy";

    if (!isParsedUrlType(type)) return;
    output.push({
      url: match[2],
      type,
    });
  });

  return output;
}

export function getProxyUrls() {
  return getParsedUrls()
    .filter((v) => v.type === "proxy")
    .map((v) => v.url);
}

export function getProviderApiUrls() {
  return getParsedUrls()
    .filter((v) => v.type === "api")
    .map((v) => v.url);
}