mirror of https://github.com/sussy-code/smov.git synced 2024-12-29 16:07:40 +01:00

it's not finished

mrjvs 2023-12-18 22:53:59 +01:00
parent ed67c1e63b
commit e48af381c5
2 changed files with 58 additions and 26 deletions

View file

@@ -0,0 +1,55 @@
import { ScrapeMedia } from "@movie-web/providers";

function scrapeMediaToQueryMedia(media: ScrapeMedia) {
  let extra: Record<string, string> = {};
  if (media.type === "show") {
    extra = {
      episodeNumber: media.episode.number.toString(),
      episodeTmdbId: media.episode.tmdbId,
      seasonNumber: media.season.number.toString(),
      seasonTmdbId: media.season.tmdbId,
    };
  }
  return {
    type: media.type,
    releaseYear: media.releaseYear.toString(),
    // tmdbId was also sent by the inline code this helper replaces
    tmdbId: media.tmdbId,
    imdbId: media.imdbId,
    title: media.title,
    ...extra,
  };
}

function addQueryDataToUrl(url: URL, data: Record<string, string | undefined>) {
  Object.entries(data).forEach((entry) => {
    if (entry[1]) url.searchParams.set(entry[0], entry[1]);
  });
}

export function makeProviderUrl(base: string) {
  const makeUrl = (p: string) => new URL(`${base}${p}`);
  return {
    scrapeSource(sourceId: string, media: ScrapeMedia) {
      const url = makeUrl("/scrape/source");
      addQueryDataToUrl(url, scrapeMediaToQueryMedia(media));
      addQueryDataToUrl(url, { id: sourceId });
      return url.toString();
    },
    scrapeAll(media: ScrapeMedia) {
      const url = makeUrl("/scrape");
      addQueryDataToUrl(url, scrapeMediaToQueryMedia(media));
      return url.toString();
    },
    scrapeEmbed(embedId: string, embedUrl: string) {
      const url = makeUrl("/scrape/embed");
      addQueryDataToUrl(url, { id: embedId, url: embedUrl });
      return url.toString();
    },
  };
}

export function connectServerSideEvents(url: string, endEvents: string[]) {
  // Unfinished stub in this commit; the SSE connection logic still has to be written.
  return {};
}
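
For illustration only (not part of the commit): a rough idea of how the URL builder above is meant to be called. The base URL and media values below are made up, it assumes a movie-type ScrapeMedia carries the same type/title/releaseYear/tmdbId/imdbId fields the old inline code read, and the resulting query string is approximate.

// Hypothetical usage of makeProviderUrl / scrapeAll (illustration only).
const api = makeProviderUrl("https://scraper.example.com");
const url = api.scrapeAll({
  type: "movie",
  title: "Example Movie",
  releaseYear: 2021,
  tmdbId: "12345",
  imdbId: "tt0000000",
});
// url would be roughly:
// https://scraper.example.com/scrape?type=movie&releaseYear=2021&tmdbId=12345&imdbId=tt0000000&title=Example+Movie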

View file

@@ -5,6 +5,7 @@ import {
 } from "@movie-web/providers";
 import { RefObject, useCallback, useEffect, useRef, useState } from "react";
+import { makeProviderUrl } from "@/backend/helpers/providerApi";
 import { getLoadbalancedProviderApiUrl, providers } from "@/utils/providers";

 export interface ScrapingItems {
@@ -150,32 +151,8 @@ export function useScrape()
       startScrape();
       const sseOutput = await new Promise<RunOutput | null>(
         (resolve, reject) => {
-          const finalUrl = new URL(`${providerApiUrl}/scrape`);
-          finalUrl.searchParams.append("type", media.type);
-          finalUrl.searchParams.append(
-            "releaseYear",
-            media.releaseYear.toString()
-          );
-          finalUrl.searchParams.append("title", media.title);
-          finalUrl.searchParams.append("tmdbId", media.tmdbId);
-          if (media.imdbId)
-            finalUrl.searchParams.append("imdbId", media.imdbId);
-          if (media.type === "show") {
-            finalUrl.searchParams.append(
-              "episodeNumber",
-              media.episode.number.toString()
-            );
-            finalUrl.searchParams.append(
-              "episodeTmdbId",
-              media.episode.tmdbId
-            );
-            finalUrl.searchParams.append(
-              "seasonNumber",
-              media.season.number.toString()
-            );
-            finalUrl.searchParams.append("seasonTmdbId", media.season.tmdbId);
-          }
-          const scrapeEvents = new EventSource(finalUrl.toString());
+          const baseUrlMaker = makeProviderUrl(providerApiUrl);
+          const scrapeEvents = new EventSource(baseUrlMaker.scrapeAll(media));
           scrapeEvents.addEventListener("init", (e) => {
             initEvent(JSON.parse(e.data));
           });
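
The connectServerSideEvents helper added above is still a stub. Purely as an assumption about where it is headed, based on its url/endEvents signature and the EventSource usage in this hook, a minimal sketch might look like the following; this is not the code from this commit or any later one:

// Hypothetical sketch only: wrap an EventSource and settle once any "end" event fires.
export function connectServerSideEventsSketch<T>(url: string, endEvents: string[]) {
  const source = new EventSource(url);
  let resolve: (value: T) => void = () => {};
  const promise = new Promise<T>((res) => {
    resolve = res;
  });
  // Close the connection and resolve with the payload of the first terminating event.
  endEvents.forEach((eventName) => {
    source.addEventListener(eventName, (e) => {
      source.close();
      resolve(JSON.parse((e as MessageEvent).data));
    });
  });
  return {
    promise,
    // Subscribe to intermediate events (e.g. "init") while the scrape runs; assumes JSON payloads.
    on(eventName: string, callback: (data: unknown) => void) {
      source.addEventListener(eventName, (e) =>
        callback(JSON.parse((e as MessageEvent).data))
      );
    },
  };
}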