Mirror of https://git.luna-app.eu/50n50/sources (synced 2025-12-21 21:26:19 +01:00)

Commit 2ff8f723ff (parent 9e589f3dc4): Updated GlobalExtractor version to 1.1.8
22 changed files with 6419 additions and 253 deletions

@@ -1,18 +1,18 @@
 {
-    "sourceName": "s.to (ENG DUB)",
-    "iconUrl": "https://git.luna-app.eu/50n50/sources/raw/branch/main/s.to/sto.png",
-    "author": {
-        "name": "Cufiy",
-        "icon": "https://files.catbox.moe/ttj4fc.gif"
-    },
-    "version": "0.3.14",
-    "language": "English (DUB)",
-    "streamType": "HLS",
-    "quality": "720p",
-    "baseUrl": "https://google.com",
-    "searchBaseUrl": "https://s.to/ajax/seriesSearch?keyword=%s",
-    "scriptUrl": "https://git.luna-app.eu/50n50/sources/raw/branch/main/s.to/sToEngDub_v2.js",
-    "asyncJS": true,
-    "streamAsyncJS": false,
-    "type": "shows"
-}
+    "sourceName": "s.to (ENG DUB)",
+    "iconUrl": "https://git.luna-app.eu/50n50/sources/raw/branch/main/s.to/sto.png",
+    "author": {
+        "name": "Cufiy",
+        "icon": "https://files.catbox.moe/ttj4fc.gif"
+    },
+    "version": "0.3.15",
+    "language": "English (DUB)",
+    "streamType": "HLS",
+    "quality": "720p",
+    "baseUrl": "https://google.com",
+    "searchBaseUrl": "https://s.to/ajax/seriesSearch?keyword=%s",
+    "scriptUrl": "https://git.luna-app.eu/50n50/sources/raw/branch/main/s.to/sToEngDub_v2.js",
+    "asyncJS": true,
+    "streamAsyncJS": false,
+    "type": "shows"
+}

@@ -352,7 +352,7 @@ function base64Decode(str) {
// EDITING THIS FILE COULD BREAK THE UPDATER AND CAUSE ISSUES WITH THE EXTRACTOR

/* {GE START} */
/* {VERSION: 1.1.3} */
/* {VERSION: 1.1.8} */

/**
 * @name global_extractor.js

@@ -360,8 +360,8 @@ function base64Decode(str) {
 * @author Cufiy
 * @url https://github.com/JMcrafter26/sora-global-extractor
 * @license CUSTOM LICENSE - see https://github.com/JMcrafter26/sora-global-extractor/blob/main/LICENSE
 * @date 2025-07-23 17:47:48
 * @version 1.1.3
 * @date 2025-11-05 15:44:57
 * @version 1.1.8
 * @note This file was generated automatically.
 * The global extractor comes with an auto-updating feature, so you can always get the latest version. https://github.com/JMcrafter26/sora-global-extractor#-auto-updater
 */

@@ -374,7 +374,17 @@ function globalExtractor(providers) {
      // check if streamUrl is not null, a string, and starts with http or https
      if (streamUrl && typeof streamUrl === "string" && (streamUrl.startsWith("http"))) {
        return streamUrl;
      // if its an array, get the value that starts with http
      } else if (Array.isArray(streamUrl)) {
        const httpStream = streamUrl.find(url => url.startsWith("http"));
        if (httpStream) {
          return httpStream;
        }
      } else if (streamUrl || typeof streamUrl !== "string") {
        // check if it's a valid stream URL
        return null;
      }

    } catch (error) {
      // Ignore the error and try the next provider
    }
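
For illustration only (not part of the commit), the gist of the added normalisation above, pulled out into a standalone helper; the name pickHttpStream is made up:

function pickHttpStream(streamUrl) {
  // a plain string is accepted as-is when it looks like an http(s) URL
  if (streamUrl && typeof streamUrl === "string" && streamUrl.startsWith("http")) {
    return streamUrl;
  }
  // an array is reduced to its first entry that starts with http
  if (Array.isArray(streamUrl)) {
    return streamUrl.find(u => typeof u === "string" && u.startsWith("http")) ?? null;
  }
  return null;
}

pickHttpStream("https://cdn.example.com/master.m3u8");                  // the URL itself
pickHttpStream(["blob:local", "https://cdn.example.com/master.m3u8"]);  // the https entry
pickHttpStream(undefined);                                              // null
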

@@ -437,8 +447,14 @@ async function multiExtractor(providers) {
        console.log(`Skipping ${provider} as it has already 3 streams`);
        continue;
      }
      const streamUrl = await extractStreamUrlByProvider(url, provider);
      // check if streamUrl is not null, a string, and starts with http or https
      let streamUrl = await extractStreamUrlByProvider(url, provider);

      if (streamUrl && Array.isArray(streamUrl)) {
        const httpStream = streamUrl.find(url => url.startsWith("http"));
        if (httpStream) {
          streamUrl = httpStream;
        }
      }
      // check if provider is already in streams, if it is, add a number to it
      if (
        !streamUrl ||

@@ -493,7 +509,14 @@ async function extractStreamUrlByProvider(url, provider) {
  if(provider == 'bigwarp') {
    delete headers["User-Agent"];
    headers["x-requested-with"] = "XMLHttpRequest";
  } else if (provider == 'vk') {
    headers["encoding"] = "windows-1251"; // required
  } else if (provider == 'sibnet') {
    headers["encoding"] = "windows-1251"; // required
  } else if (provider == 'supervideo') {
    delete headers["User-Agent"];
  }

  // fetch the url
  // and pass the response to the extractor function
  console.log("Fetching URL: " + url);
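
A hedged sketch of how the provider-specific tweaks above end up being used; the base header set shown here is an assumption, only the per-provider branches come from the diff:

// Hypothetical base headers; the real set is built earlier in extractStreamUrlByProvider.
let headers = { "User-Agent": "Mozilla/5.0", "Referer": url };
if (provider == 'bigwarp') {
  delete headers["User-Agent"];
  headers["x-requested-with"] = "XMLHttpRequest";
} else if (provider == 'vk' || provider == 'sibnet') {
  headers["encoding"] = "windows-1251"; // marked as required for these hosts in the diff
} else if (provider == 'supervideo') {
  delete headers["User-Agent"];
}
const response = await soraFetch(url, { headers, method: 'GET' });
const html = await response.text();
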

@@ -552,6 +575,13 @@ async function extractStreamUrlByProvider(url, provider) {
        console.log("Error extracting stream URL from doodstream:", error);
        return null;
      }
    case "earnvids":
      try {
        return await earnvidsExtractor(html, url);
      } catch (error) {
        console.log("Error extracting stream URL from earnvids:", error);
        return null;
      }
    case "filemoon":
      try {
        return await filemoonExtractor(html, url);

@@ -559,6 +589,20 @@ async function extractStreamUrlByProvider(url, provider) {
        console.log("Error extracting stream URL from filemoon:", error);
        return null;
      }
    case "lulustream":
      try {
        return await lulustreamExtractor(html, url);
      } catch (error) {
        console.log("Error extracting stream URL from lulustream:", error);
        return null;
      }
    case "megacloud":
      try {
        return await megacloudExtractor(html, url);
      } catch (error) {
        console.log("Error extracting stream URL from megacloud:", error);
        return null;
      }
    case "mp4upload":
      try {
        return await mp4uploadExtractor(html, url);

@@ -566,6 +610,62 @@ async function extractStreamUrlByProvider(url, provider) {
        console.log("Error extracting stream URL from mp4upload:", error);
        return null;
      }
    case "sendvid":
      try {
        return await sendvidExtractor(html, url);
      } catch (error) {
        console.log("Error extracting stream URL from sendvid:", error);
        return null;
      }
    case "sibnet":
      try {
        return await sibnetExtractor(html, url);
      } catch (error) {
        console.log("Error extracting stream URL from sibnet:", error);
        return null;
      }
    case "streamtape":
      try {
        return await streamtapeExtractor(html, url);
      } catch (error) {
        console.log("Error extracting stream URL from streamtape:", error);
        return null;
      }
    case "streamup":
      try {
        return await streamupExtractor(html, url);
      } catch (error) {
        console.log("Error extracting stream URL from streamup:", error);
        return null;
      }
    case "supervideo":
      try {
        return await supervideoExtractor(html, url);
      } catch (error) {
        console.log("Error extracting stream URL from supervideo:", error);
        return null;
      }
    case "uploadcx":
      try {
        return await uploadcxExtractor(html, url);
      } catch (error) {
        console.log("Error extracting stream URL from uploadcx:", error);
        return null;
      }
    case "uqload":
      try {
        return await uqloadExtractor(html, url);
      } catch (error) {
        console.log("Error extracting stream URL from uqload:", error);
        return null;
      }
    case "videospk":
      try {
        return await videospkExtractor(html, url);
      } catch (error) {
        console.log("Error extracting stream URL from videospk:", error);
        return null;
      }
    case "vidmoly":
      try {
        return await vidmolyExtractor(html, url);

@@ -649,6 +749,28 @@ function randomStr(length) {
  }
  return result;
}
/* --- earnvids --- */

/* {REQUIRED PLUGINS: unbaser} */
/**
 * @name earnvidsExtractor
 * @author 50/50
 */
async function earnvidsExtractor(html, url = null) {
  try {
    const obfuscatedScript = html.match(/<script[^>]*>\s*(eval\(function\(p,a,c,k,e,d.*?\)[\s\S]*?)<\/script>/);
    const unpackedScript = unpack(obfuscatedScript[1]);
    const streamMatch = unpackedScript.match(/["'](\/stream\/[^"']+)["']/);
    const hlsLink = streamMatch ? streamMatch[1] : null;
    const baseUrl = url.match(/^(https?:\/\/[^/]+)/)[1];
    console.log("HLS Link:" + baseUrl + hlsLink);
    return baseUrl + hlsLink;
  } catch (err) {
    console.log(err);
    return "https://files.catbox.moe/avolvc.mp4";
  }
}

/* --- filemoon --- */

/* {REQUIRED PLUGINS: unbaser} */

@@ -703,6 +825,354 @@ async function filemoonExtractor(html, url = null) {
}


/* --- lulustream --- */

/**
 * @name LuluStream Extractor
 * @author Cufiy
 */
async function lulustreamExtractor(data, url = null) {
  const scriptRegex = /sources:\s*\[\{file:"([^"]+)"/;
  const scriptMatch = scriptRegex.exec(data);
  const decoded = scriptMatch ? scriptMatch[1] : false;
  return decoded;
}
/* --- megacloud --- */

/**
 * @name megacloudExtractor
 * @author ShadeOfChaos
 */

// Megacloud V3 specific
async function megacloudExtractor(html, embedUrl) {
  // TESTING ONLY START
  const testcase = '/api/static';
  if(embedUrl.slice(-testcase.length) == testcase) {
    try {
      const response = await soraFetch(embedUrl, { method: 'GET', headers: { "referer": "https://megacloud.blog/" } });
      embedUrl = response.url;
    } catch (error) {
      throw new Error("[TESTING ONLY] Megacloud extraction error:", error);
    }
  }
  // TESTING ONLY END
  const CHARSET = Array.from({ length: 95 }, (_, i) => String.fromCharCode(i + 32));
  const xraxParams = embedUrl.split('/').pop();
  const xrax = xraxParams.includes('?') ? xraxParams.split('?')[0] : xraxParams;
  const nonce = await getNonce(embedUrl);
  // return decrypt(secretKey, nonce, encryptedText);
  try {
    const response = await soraFetch(`https://megacloud.blog/embed-2/v3/e-1/getSources?id=${xrax}&_k=${nonce}`, { method: 'GET', headers: { "referer": "https://megacloud.blog/" } });
    const rawSourceData = await response.json();
    const encrypted = rawSourceData?.sources;
    let decryptedSources = null;
    // console.log('rawSourceData', rawSourceData);
    if (rawSourceData?.encrypted == false) {
      decryptedSources = rawSourceData.sources;
    }
    if (decryptedSources == null) {
      decryptedSources = await getDecryptedSourceV3(encrypted, nonce);
      if (!decryptedSources) throw new Error("Failed to decrypt source");
    }
    // console.log("Decrypted sources:" + JSON.stringify(decryptedSources, null, 2));
    // return the first source if it's an array
    if (Array.isArray(decryptedSources) && decryptedSources.length > 0) {
      try {
        return decryptedSources[0].file;
      } catch (error) {
        console.log("Error extracting MegaCloud stream URL:" + error);
        return false;
      }
    }
    // return {
    //   status: true,
    //   result: {
    //     sources: decryptedSources,
    //     tracks: rawSourceData.tracks,
    //     intro: rawSourceData.intro ?? null,
    //     outro: rawSourceData.outro ?? null,
    //     server: rawSourceData.server ?? null
    //   }
    // }
  } catch (error) {
    console.error(`[ERROR][decryptSources] Error decrypting ${embedUrl}:`, error);
    return {
      status: false,
      error: error?.message || 'Failed to get HLS link'
    };
  }
  /**
   * Computes a key based on the given secret and nonce.
   * The key is used to "unlock" the encrypted data.
   * The computation of the key is based on the following steps:
   * 1. Concatenate the secret and nonce.
   * 2. Compute a hash value of the concatenated string using a simple
   *    hash function (similar to Java's String.hashCode()).
   * 3. Compute the remainder of the hash value divided by the maximum
   *    value of a 64-bit signed integer.
   * 4. Use the result as a XOR mask to process the characters of the
   *    concatenated string.
   * 5. Rotate the XOR-processed string by a shift amount equal to the
   *    hash value modulo the length of the XOR-processed string plus 5.
   * 6. Interleave the rotated string with the reversed nonce string.
   * 7. Take a substring of the interleaved string of length equal to 96
   *    plus the hash value modulo 33.
   * 8. Convert each character of the substring to a character code
   *    between 32 and 126 (inclusive) by taking the remainder of the
   *    character code divided by 95 and adding 32.
   * 9. Join the resulting array of characters into a string and return it.
   * @param {string} secret - The secret string
   * @param {string} nonce - The nonce string
   * @returns {string} The computed key
   */
  function computeKey(secret, nonce) {
    const secretAndNonce = secret + nonce;
    let hashValue = 0n;
    for (const char of secretAndNonce) {
      hashValue = BigInt(char.charCodeAt(0)) + hashValue * 31n + (hashValue << 7n) - hashValue;
    }
    const maximum64BitSignedIntegerValue = 0x7fffffffffffffffn;
    const hashValueModuloMax = hashValue % maximum64BitSignedIntegerValue;
    const xorMask = 247;
    const xorProcessedString = [...secretAndNonce]
      .map(char => String.fromCharCode(char.charCodeAt(0) ^ xorMask))
      .join('');
    const xorLen = xorProcessedString.length;
    const shiftAmount = (Number(hashValueModuloMax) % xorLen) + 5;
    const rotatedString = xorProcessedString.slice(shiftAmount) + xorProcessedString.slice(0, shiftAmount);
    const reversedNonceString = nonce.split('').reverse().join('');
    let interleavedString = '';
    const maxLen = Math.max(rotatedString.length, reversedNonceString.length);
    for (let i = 0; i < maxLen; i++) {
      interleavedString += (rotatedString[i] || '') + (reversedNonceString[i] || '');
    }
    const length = 96 + (Number(hashValueModuloMax) % 33);
    const partialString = interleavedString.substring(0, length);
    return [...partialString]
      .map(ch => String.fromCharCode((ch.charCodeAt(0) % 95) + 32))
      .join('');
  }
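
computeKey() above is pure string arithmetic, so its behaviour is easy to sanity-check. A small sketch with placeholder inputs (not a real MegaCloud key or nonce); since computeKey is nested inside megacloudExtractor, a check like this would have to run in that scope or against a copied-out version of the function:

const k1 = computeKey("placeholder-secret-key", "placeholder-nonce-value");
const k2 = computeKey("placeholder-secret-key", "placeholder-nonce-value");
console.log(k1 === k2);                 // true: the same secret and nonce always give the same key
console.log(/^[\x20-\x7e]+$/.test(k1)); // true: step 8 maps every character into printable ASCII (32..126)
console.log(k1.length);                 // 96 + (hash % 33) for long inputs, shorter when secret + nonce is short
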
  /**
   * Encrypts a given text using a columnar transposition cipher with a given key.
   * The function arranges the text into a grid of columns and rows determined by the key length,
   * fills the grid column by column based on the sorted order of the key characters,
   * and returns the encrypted text by reading the grid row by row.
   *
   * @param {string} text - The text to be encrypted.
   * @param {string} key - The key that determines the order of columns in the grid.
   * @returns {string} The encrypted text.
   */
  function columnarCipher(text, key) {
    const columns = key.length;
    const rows = Math.ceil(text.length / columns);
    const grid = Array.from({ length: rows }, () => Array(columns).fill(''));
    const columnOrder = [...key]
      .map((char, idx) => ({ char, idx }))
      .sort((a, b) => a.char.charCodeAt(0) - b.char.charCodeAt(0));
    let i = 0;
    for (const { idx } of columnOrder) {
      for (let row = 0; row < rows; row++) {
        grid[row][idx] = text[i++] || '';
      }
    }
    return grid.flat().join('');
  }
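
A worked example of the column ordering (illustrative, not from the commit): with key "BAC" the columns are filled in sorted key order A, B, C and then read back row by row.

// key "BAC": sorted order is A (column 1), B (column 0), C (column 2)
// column 1 is filled with "ab", column 0 with "cd", column 2 with "ef":
//   row 0: c a e
//   row 1: d b f
console.log(columnarCipher("abcdef", "BAC")); // "caedbf"
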
  /**
   * Deterministically unshuffles an array of characters based on a given key phrase.
   * The function simulates a pseudo-random shuffling using a numeric seed derived
   * from the key phrase. This ensures that the same character array and key phrase
   * will always produce the same output, allowing for deterministic "unshuffling".
   * @param {Array} characters - The array of characters to unshuffle.
   * @param {string} keyPhrase - The key phrase used to generate the seed for the
   *    pseudo-random number generator.
   * @returns {Array} A new array representing the deterministically unshuffled characters.
   */
  function deterministicUnshuffle(characters, keyPhrase) {
    let seed = [...keyPhrase].reduce((acc, char) => (acc * 31n + BigInt(char.charCodeAt(0))) & 0xffffffffn, 0n);
    const randomNumberGenerator = (upperLimit) => {
      seed = (seed * 1103515245n + 12345n) & 0x7fffffffn;
      return Number(seed % BigInt(upperLimit));
    };
    const shuffledCharacters = characters.slice();
    for (let i = shuffledCharacters.length - 1; i > 0; i--) {
      const j = randomNumberGenerator(i + 1);
      [shuffledCharacters[i], shuffledCharacters[j]] = [shuffledCharacters[j], shuffledCharacters[i]];
    }
    return shuffledCharacters;
  }
  /**
   * Decrypts an encrypted text using a secret key and a nonce through multiple rounds of decryption.
   * The decryption process includes base64 decoding, character substitution using a pseudo-random
   * number generator, a columnar transposition cipher, and deterministic unshuffling of the character set.
   * Finally, it extracts and parses the decrypted JSON string or verifies it using a regex pattern.
   *
   * @param {string} secretKey - The key used to decrypt the text.
   * @param {string} nonce - A nonce for additional input to the decryption key.
   * @param {string} encryptedText - The text to be decrypted, encoded in base64.
   * @param {number} [rounds=3] - The number of decryption rounds to perform.
   * @returns {Object|null} The decrypted JSON object if successful, or null if parsing fails.
   */
  function decrypt(secretKey, nonce, encryptedText, rounds = 3) {
    let decryptedText = Buffer.from(encryptedText, 'base64').toString('utf-8');
    const keyPhrase = computeKey(secretKey, nonce);
    for (let round = rounds; round >= 1; round--) {
      const encryptionPassphrase = keyPhrase + round;
      let seed = [...encryptionPassphrase].reduce((acc, char) => (acc * 31n + BigInt(char.charCodeAt(0))) & 0xffffffffn, 0n);
      const randomNumberGenerator = (upperLimit) => {
        seed = (seed * 1103515245n + 12345n) & 0x7fffffffn;
        return Number(seed % BigInt(upperLimit));
      };
      decryptedText = [...decryptedText]
        .map(char => {
          const charIndex = CHARSET.indexOf(char);
          if (charIndex === -1) return char;
          const offset = randomNumberGenerator(95);
          return CHARSET[(charIndex - offset + 95) % 95];
        })
        .join('');
      decryptedText = columnarCipher(decryptedText, encryptionPassphrase);
      const shuffledCharset = deterministicUnshuffle(CHARSET, encryptionPassphrase);
      const mappingArr = {};
      shuffledCharset.forEach((c, i) => (mappingArr[c] = CHARSET[i]));
      decryptedText = [...decryptedText].map(char => mappingArr[char] || char).join('');
    }
    const lengthString = decryptedText.slice(0, 4);
    let length = parseInt(lengthString, 10);
    if (isNaN(length) || length <= 0 || length > decryptedText.length - 4) {
      console.error('Invalid length in decrypted string');
      return decryptedText;
    }
    const decryptedString = decryptedText.slice(4, 4 + length);
    try {
      return JSON.parse(decryptedString);
    } catch (e) {
      console.warn('Could not parse decrypted string, unlikely to be valid. Using regex to verify');
      const regex = /"file":"(.*?)".*?"type":"(.*?)"/;
      const match = encryptedText.match(regex);
      const matchedFile = match?.[1];
      const matchType = match?.[2];
      if (!matchedFile || !matchType) {
        console.error('Could not match file or type in decrypted string');
        return null;
      }
      return decryptedString;
    }
  }
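
How decrypt() is meant to be called, shown as a hedged sketch: the key and nonce are placeholders, and in this file the real values come from asyncGetKeys() and getNonce(), with getDecryptedSourceV3() below driving the call. Note that the base64 step uses Buffer, which assumes a Node-style runtime.

// encryptedB64 stands for the "sources" string of a getSources response (placeholder name)
const sources = decrypt(secretKeyFromProvider, nonceFromEmbedPage, encryptedB64); // 3 rounds by default
if (Array.isArray(sources) && sources[0]?.file) {
  console.log(sources[0].file); // typically an HLS playlist URL
  console.log(sources[0].type); // e.g. "hls"
}
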
  /**
   * Tries to extract the MegaCloud nonce from the given embed URL.
   *
   * Fetches the HTML of the page, and tries to extract the nonce from it.
   * If that fails, it sends a request with the "x-requested-with" header set to "XMLHttpRequest"
   * and tries to extract the nonce from that HTML.
   *
   * If all else fails, it logs the HTML of both requests and returns null.
   *
   * @param {string} embedUrl The URL of the MegaCloud embed
   * @returns {string|null} The extracted nonce, or null if it couldn't be found
   */
  async function getNonce(embedUrl) {
    const res = await soraFetch(embedUrl, { headers: { "referer": "https://anicrush.to/", "x-requested-with": "XMLHttpRequest" } });
    const html = await res.text();
    const match0 = html.match(/\<meta[\s\S]*?name="_gg_fb"[\s\S]*?content="([\s\S]*?)">/);
    if (match0?.[1]) {
      return match0[1];
    }
    const match1 = html.match(/_is_th:(\S*?)\s/);
    if (match1?.[1]) {
      return match1[1];
    }
    const match2 = html.match(/data-dpi="([\s\S]*?)"/);
    if (match2?.[1]) {
      return match2[1];
    }
    const match3 = html.match(/_lk_db[\s]?=[\s\S]*?x:[\s]"([\S]*?)"[\s\S]*?y:[\s]"([\S]*?)"[\s\S]*?z:[\s]"([\S]*?)"/);
    if (match3?.[1] && match3?.[2] && match3?.[3]) {
      return "" + match3[1] + match3[2] + match3[3];
    }
    const match4 = html.match(/nonce="([\s\S]*?)"/);
    if (match4?.[1]) {
      if (match4[1].length >= 32) return match4[1];
    }
    const match5 = html.match(/_xy_ws = "(\S*?)"/);
    if (match5?.[1]) {
      return match5[1];
    }
    const match6 = html.match(/[a-zA-Z0-9]{48}]/);
    if (match6?.[1]) {
      return match6[1];
    }
    return null;
  }
  async function getDecryptedSourceV3(encrypted, nonce) {
    let decrypted = null;
    const keys = await asyncGetKeys();
    for(let key in keys) {
      try {
        if (!encrypted) {
          console.log("Encrypted source missing in response")
          return null;
        }
        decrypted = decrypt(keys[key], nonce, encrypted);
        if(!Array.isArray(decrypted) || decrypted.length <= 0) {
          // Failed to decrypt source
          continue;
        }
        for(let source of decrypted) {
          if(source != null && source?.file?.startsWith('https://')) {
            // Malformed decrypted source
            continue;
          }
        }
        console.log("Functioning key:", key);
        return decrypted;
      } catch(error) {
        console.error('Error:', error);
        console.error(`[${ new Date().toLocaleString() }] Key did not work: ${ key }`);
        continue;
      }
    }
    return null;
  }
  async function asyncGetKeys() {
    const resolution = await Promise.allSettled([
      fetchKey("ofchaos", "https://ac-api.ofchaos.com/api/key"),
      fetchKey("yogesh", "https://raw.githubusercontent.com/yogesh-hacker/MegacloudKeys/refs/heads/main/keys.json"),
      fetchKey("esteven", "https://raw.githubusercontent.com/carlosesteven/e1-player-deobf/refs/heads/main/output/key.json")
    ]);
    const keys = resolution.filter(r => r.status === 'fulfilled' && r.value != null).reduce((obj, r) => {
      let rKey = Object.keys(r.value)[0];
      let rValue = Object.values(r.value)[0];
      if (typeof rValue === 'string') {
        obj[rKey] = rValue.trim();
        return obj;
      }
      obj[rKey] = rValue?.mega ?? rValue?.decryptKey ?? rValue?.MegaCloud?.Anime?.Key ?? rValue?.megacloud?.key ?? rValue?.key ?? rValue?.megacloud?.anime?.key ?? rValue?.megacloud;
      return obj;
    }, {});
    if (keys.length === 0) {
      throw new Error("Failed to fetch any decryption key");
    }
    return keys;
  }
  function fetchKey(name, url) {
    return new Promise(async (resolve) => {
      try {
        const response = await soraFetch(url, { method: 'get' });
        const key = await response.text();
        let trueKey = null;
        try {
          trueKey = JSON.parse(key);
        } catch (e) {
          trueKey = key;
        }
        resolve({ [name]: trueKey })
      } catch (error) {
        resolve(null);
      }
    });
  }
}
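
Taken together, the new MegaCloud V3 path works like this: getNonce() scrapes the embed page, getSources is queried with the xrax id and nonce, and when the payload is encrypted, getDecryptedSourceV3() tries each community-maintained key with decrypt() until one yields a sources array. A minimal, illustrative call (the embed URL is made up):

const hls = await megacloudExtractor(null, "https://megacloud.blog/embed-2/v3/e-1/AbCdEf123456?k=1");
if (typeof hls === "string" && hls.startsWith("http")) {
  console.log("MegaCloud stream:", hls);  // the first decrypted source's .file
} else {
  console.log("Extraction failed:", hls); // on error an object { status: false, error } comes back
}
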
/* --- mp4upload --- */

/**

@@ -720,6 +1190,185 @@ async function mp4uploadExtractor(html, url = null) {
    return null;
  }
}
/* --- sendvid --- */

/**
 * @name sendvidExtractor
 * @author 50/50
 */
async function sendvidExtractor(data, url = null) {
  const match = data.match(/var\s+video_source\s*=\s*"([^"]+)"/);
  const videoUrl = match ? match[1] : null;
  return videoUrl;
}
/* --- sibnet --- */

/**
 * @name sibnetExtractor
 * @author scigward
 */
async function sibnetExtractor(html, embedUrl) {
  try {
    const videoMatch = html.match(
      /player\.src\s*\(\s*\[\s*\{\s*src\s*:\s*["']([^"']+)["']/i
    );
    if (!videoMatch || !videoMatch[1]) {
      throw new Error("Sibnet video source not found");
    }
    const videoPath = videoMatch[1];
    const videoUrl = videoPath.startsWith("http")
      ? videoPath
      : `https://video.sibnet.ru${videoPath}`;
    return videoUrl;
  } catch (error) {
    console.log("SibNet extractor error: " + error.message);
    return null;
  }
}
/* --- streamtape --- */

/**
 *
 * @name streamTapeExtractor
 * @author ShadeOfChaos
 */
async function streamtapeExtractor(html, url) {
  let promises = [];
  const LINK_REGEX = /link['"]{1}\).innerHTML *= *['"]{1}([\s\S]*?)["'][\s\S]*?\(["']([\s\S]*?)["']([\s\S]*?);/g;
  const CHANGES_REGEX = /([0-9]+)/g;
  if(html == null) {
    if(url == null) {
      throw new Error('Provided incorrect parameters.');
    }
    const response = await soraFetch(url);
    html = await response.text();
  }
  const matches = html.matchAll(LINK_REGEX);
  for (const match of matches) {
    let base = match?.[1];
    let params = match?.[2];
    const changeStr = match?.[3];
    if(changeStr == null || changeStr == '') continue;
    const changes = changeStr.match(CHANGES_REGEX);
    for(let n of changes) {
      params = params.substring(n);
    }
    while(base[0] == '/') {
      base = base.substring(1);
    }
    const url = 'https://' + base + params;
    promises.push(testUrl(url));
  }
  // Race for first success
  return Promise.any(promises).then((value) => {
    return value;
  }).catch((error) => {
    return null;
  });
  async function testUrl(url) {
    return new Promise(async (resolve, reject) => {
      try {
        // Timeout version preferred, but Sora does not support it currently
        // var response = await soraFetch(url, { method: 'GET', signal: AbortSignal.timeout(2000) });
        var response = await soraFetch(url);
        if(response == null) throw new Error('Connection timed out.');
      } catch(e) {
        console.error('Rejected due to:', e.message);
        return reject(null);
      }
      if(response?.ok && response?.status === 200) {
        return resolve(url);
      }
      console.warn('Reject because of response:', response?.ok, response?.status);
      return reject(null);
    });
  }
}
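
The loop above rebuilds Streamtape's obfuscated link: capture group 1 is the host/path fragment, group 2 the query fragment, and group 3 typically a chain of .substring(n) calls that the page applies to it. A worked example with made-up captures:

let base   = "//streamtape.com/get_video?id=abc&expires=123"; // group 1
let params = "xx&token=XYZ";                                   // group 2
const changeStr = ").substring(1).substring(1)";               // group 3
for (const n of changeStr.match(/([0-9]+)/g)) {                // same pattern as CHANGES_REGEX
  params = params.substring(n); // "x&token=XYZ", then "&token=XYZ"
}
while (base[0] == '/') base = base.substring(1);
console.log('https://' + base + params);
// "https://streamtape.com/get_video?id=abc&expires=123&token=XYZ"
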
/* --- streamup --- */

/**
 * @name StreamUp Extractor
 * @author Cufiy
 */
async function streamupExtractor(data, url = null) {
  // if url ends with /, remove it
  if (url.endsWith("/")) {
    url = url.slice(0, -1);
  }
  // split the url by / and get the last part
  const urlParts = url.split("/");
  const videoId = urlParts[urlParts.length - 1];
  const apiUrl = `https://strmup.to/ajax/stream?filecode=${videoId}`;
  const response = await soraFetch(apiUrl);
  const jsonData = await response.json();
  if (jsonData && jsonData.streaming_url) {
    return jsonData.streaming_url;
  } else {
    console.log("No streaming URL found in the response.");
    return null;
  }
}
/* --- supervideo --- */

/* {REQUIRED PLUGINS: unbaser} */
/**
 * @name SuperVideo Extractor
 * @author 50/50
 */
async function supervideoExtractor(data, url = null) {
  const obfuscatedScript = data.match(/<script[^>]*>\s*(eval\(function\(p,a,c,k,e,d.*?\)[\s\S]*?)<\/script>/);
  const unpackedScript = unpack(obfuscatedScript[1]);
  const regex = /file:\s*"([^"]+\.m3u8)"/;
  const match = regex.exec(unpackedScript);
  if (match) {
    const fileUrl = match[1];
    console.log("File URL:" + fileUrl);
    return fileUrl;
  }
  return "No stream found";
}

/* --- uploadcx --- */

/**
 * @name UploadCx Extractor
 * @author 50/50
 */
async function uploadcxExtractor(data, url = null) {
  const mp4Match = /sources:\s*\["([^"]+\.mp4)"]/i.exec(data);
  return mp4Match ? mp4Match[1] : null;
}
/* --- uqload --- */

/**
 * @name uqloadExtractor
 * @author scigward
 */
async function uqloadExtractor(html, embedUrl) {
  try {
    const match = html.match(/sources:\s*\[\s*"([^"]+\.mp4)"\s*\]/);
    const videoSrc = match ? match[1] : "";
    return videoSrc;
  } catch (error) {
    console.log("uqloadExtractor error:", error.message);
    return null;
  }
}
/* --- videospk --- */

/* {REQUIRED PLUGINS: unbaser} */
/**
 * @name videospkExtractor
 * @author 50/50
 */
async function videospkExtractor(data, url = null) {
  const obfuscatedScript = data.match(/<script[^>]*>\s*(eval\(function\(p,a,c,k,e,d.*?\)[\s\S]*?)<\/script>/);
  const unpackedScript = unpack(obfuscatedScript[1]);
  const streamMatch = unpackedScript.match(/["'](\/stream\/[^"']+)["']/);
  const hlsLink = streamMatch ? streamMatch[1] : null;
  return "https://videospk.xyz" + hlsLink;
}

/* --- vidmoly --- */

/**

@@ -898,7 +1547,11 @@ async function soraFetch(url, options = { headers: {}, method: 'GET', body: null
    }
  }
}

/***********************************************************
 * UNPACKER MODULE
 * Credit to GitHub user "mnsrulz" for Unpacker Node library
 * https://github.com/mnsrulz/unpacker
 ***********************************************************/
class Unbaser {
  constructor(base) {
    this.ALPHABET = {

@@ -934,6 +1587,12 @@ class Unbaser {
    return ret;
  }
}

function detectUnbaser(source) {
  /* Detects whether `source` is P.A.C.K.E.R. coded. */
  return source.replace(" ", "").startsWith("eval(function(p,a,c,k,e,");
}

function unpack(source) {
  let { payload, symtab, radix, count } = _filterargs(source);
  if (count != symtab.length) {
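
Several of the new extractors (earnvids, filemoon, supervideo and videospk, per the {REQUIRED PLUGINS: unbaser} markers) lean on this unpacker: they grab the eval(function(p,a,c,k,e,d)...) blob from the embed page, run it through unpack(), and regex the stream URL out of the readable result. A hedged sketch of that pattern; the html variable and the final regex are illustrative:

const packed = html.match(/<script[^>]*>\s*(eval\(function\(p,a,c,k,e,d[\s\S]*?)<\/script>/)?.[1];
if (packed && detectUnbaser(packed)) {
  const readable = unpack(packed);                       // original source, tokens restored from the symbol table
  const file = readable.match(/file:\s*"([^"]+)"/)?.[1]; // the same kind of match the extractors above use
  console.log(file ?? "no stream found in unpacked script");
}
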

@@ -1,18 +1,18 @@
 {
-    "sourceName": "s.to (GER DUB)",
-    "iconUrl": "https://git.luna-app.eu/50n50/sources/raw/branch/main/s.to/sto.png",
-    "author": {
-        "name": "Hamzo & Cufiy",
-        "icon": "https://cdn.discordapp.com/avatars/623644371819954226/591ecab10b0b4535e859bb0b9bbe62e5?size=1024"
-    },
-    "version": "0.3.14",
-    "language": "German (DUB)",
-    "streamType": "HLS",
-    "quality": "720p",
-    "baseUrl": "https://google.com",
-    "searchBaseUrl": "https://s.to/ajax/seriesSearch?keyword=%s",
-    "scriptUrl": "https://git.luna-app.eu/50n50/sources/raw/branch/main/s.to/sToGerDub_v2.js",
-    "asyncJS": true,
-    "streamAsyncJS": false,
-    "type": "shows"
-}
+    "sourceName": "s.to (GER DUB)",
+    "iconUrl": "https://git.luna-app.eu/50n50/sources/raw/branch/main/s.to/sto.png",
+    "author": {
+        "name": "Hamzo & Cufiy",
+        "icon": "https://cdn.discordapp.com/avatars/623644371819954226/591ecab10b0b4535e859bb0b9bbe62e5?size=1024"
+    },
+    "version": "0.3.15",
+    "language": "German (DUB)",
+    "streamType": "HLS",
+    "quality": "720p",
+    "baseUrl": "https://google.com",
+    "searchBaseUrl": "https://s.to/ajax/seriesSearch?keyword=%s",
+    "scriptUrl": "https://git.luna-app.eu/50n50/sources/raw/branch/main/s.to/sToGerDub_v2.js",
+    "asyncJS": true,
+    "streamAsyncJS": false,
+    "type": "shows"
+}