Compare commits
10 Commits
ab65c9f15a
...
9b4a2636b2
Author | SHA1 | Date | |
---|---|---|---|
9b4a2636b2 | |||
5e17abfbb2 | |||
9f3b9a77d5 | |||
e259ef9ca6 | |||
a2a81651ec | |||
11aa6ba14f | |||
cfdfdd0418 | |||
3b8c037a27 | |||
20e304aa97 | |||
7549e5b3f8 |
1
.gitignore
vendored
1
.gitignore
vendored
@ -1,2 +1,3 @@
|
||||
node_modules/
|
||||
data/credentials.json
|
||||
*.txt
|
24
README.md
24
README.md
@ -1,3 +1,27 @@
|
||||
# Kiss Asian Downloader
|
||||
|
||||
Download content from Kiss Asian through JDownloader.
|
||||
|
||||
## Installation
|
||||
|
||||
Clone this repository. Then, install the dependencies.
|
||||
|
||||
```text
|
||||
npm install
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
Inside the `data` folder there is a `credentials.json.example` file. Rename it to `credentials.json` and fill it in with your own credentials.
|
||||
|
||||
## Usage
|
||||
|
||||
You can use the script with the following command:
|
||||
|
||||
```text
|
||||
node . --display <links_file>
|
||||
```
|
||||
|
||||
> The argument `--display` is used if you wish to see the puppeteer window.
|
||||
>
|
||||
> `links_file` must be a text file containing the links to KissAsian media pages separated by newlines.
|
||||
|
@ -16,6 +16,11 @@ class JDownloaderClient {
|
||||
Logger.info(`Connected to JDownloader ${this.device.name}`);
|
||||
}
|
||||
|
||||
  /**
   * Ends the JDownloader session.
   * Disconnects the underlying My.JDownloader API client and logs which
   * device was released.
   */
  async finish() {
    await this.client.core.disconnect();
    Logger.info(`Disconnected from JDownloader ${this.device.name}`);
  }
|
||||
|
||||
async addLinks(media) {
|
||||
if (!(media instanceof Media)) {
|
||||
throw new TypeError('Invalid Media instance passed to downloader!');
|
||||
@ -83,17 +88,19 @@ class JDownloaderClient {
|
||||
}
|
||||
|
||||
async startDownload(crawledLinks = []) {
|
||||
if (!crawledLinks.packageUUID) {
|
||||
throw new JDownloaderError('Cannot start download without packageUUID!');
|
||||
}
|
||||
|
||||
const linkIDs = crawledLinks.map((link) => link.uuid);
|
||||
|
||||
if (linkIDs.length < 1) {
|
||||
throw new JDownloaderError('No links to download!');
|
||||
}
|
||||
|
||||
await this.client.core.callAction('/linkgrabberv2/moveToDownloadlist', this.device.id, [linkIDs, [crawledLinks.packageUUID]]);
|
||||
const packageUUID = crawledLinks[0].packageUUID;
|
||||
|
||||
if (!packageUUID) {
|
||||
throw new JDownloaderError('Cannot start download without packageUUID!');
|
||||
}
|
||||
|
||||
await this.client.core.callAction('/linkgrabberv2/moveToDownloadlist', this.device.id, [linkIDs, [packageUUID]]);
|
||||
Logger.success('Download started.');
|
||||
}
|
||||
}
|
||||
|
74
src/classes/filesystem/MediaLinksFile.js
Normal file
74
src/classes/filesystem/MediaLinksFile.js
Normal file
@ -0,0 +1,74 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { FatalError } = require('../../errors');
|
||||
|
||||
/**
 * Wraps a plain-text file of media links, one link per line.
 * Lines are parsed leniently (LF, CRLF, or stray CR) and written back in
 * the same `line\r` + `\n`-joined form the original file used.
 */
class MediaLinksFile {
  /**
   * @param {string} filename - Path to the links file, resolved against
   *   the current working directory.
   * @throws {FatalError} When the file does not exist on disk.
   */
  constructor(filename) {
    this.file = path.join(process.cwd(), filename);

    if (!fs.existsSync(this.file)) {
      throw new FatalError('The specified file could not be found!');
    }
  }

  /**
   * Reads the file and parses it into its non-empty lines.
   * @returns {Promise<string[]>} The links, one per line.
   */
  async read() {
    // fs.promises replaces the hand-rolled new Promise wrapper around the
    // callback API; behavior is identical.
    const data = await fs.promises.readFile(this.file);
    return MediaLinksFile.parseBuffer(data);
  }

  /**
   * Overwrites the file with the given serialized content.
   * @param {string} newContent - Serialized file body (see serialize()).
   * @returns {Promise<void>}
   */
  write(newContent) {
    return fs.promises.writeFile(this.file, newContent);
  }

  /**
   * Drops the first link from the file and persists the remainder.
   * @returns {Promise<void>}
   */
  async removeFirstLink() {
    const items = await this.read();
    const serialized = MediaLinksFile.serialize(items.slice(1));
    await this.write(serialized);
  }

  /**
   * Deletes the backing file from disk.
   * @returns {Promise<void>}
   */
  remove() {
    return fs.promises.unlink(this.file);
  }

  /**
   * Serializes lines back into the on-disk format: each line gets a
   * trailing carriage return, lines are joined with '\n'.
   * @param {string[]} content - Lines to serialize.
   * @returns {string}
   */
  static serialize(content) {
    return content.map((line) => `${line}\r`).join('\n');
  }

  /**
   * Parses a raw file buffer into its non-empty lines. Each line is
   * truncated at the first carriage return, so CRLF files parse cleanly.
   * @param {Buffer} buffer - Raw file contents.
   * @returns {string[]}
   */
  static parseBuffer(buffer) {
    return buffer.toString()
      .split('\n')
      .map((line) => {
        const carriageReturnIndex = line.indexOf('\r');

        if (carriageReturnIndex < 0) {
          return line;
        }

        return line.substring(0, carriageReturnIndex);
      })
      // Filter AFTER stripping '\r': previously a blank CRLF line ("\r\n")
      // passed the filter (the string "\r" is truthy) and was returned as
      // an empty-string link.
      .filter((line) => line);
  }
}
|
||||
|
||||
module.exports = MediaLinksFile;
|
@ -15,7 +15,7 @@ class Movie extends Media {
|
||||
}
|
||||
|
||||
getDownloadFilename(ext) {
|
||||
return `${this.name} (${this.year})${ext}`;
|
||||
return `${this.name} (${this.year}).${ext}`;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -17,6 +17,12 @@ class KissAsianScraper {
|
||||
Logger.info('Cloudflare DDOS Protection bypassed, redirected to main site.');
|
||||
}
|
||||
|
||||
  /**
   * Tears down the scraping session: closes the page this scraper owns,
   * then the whole browser instance, and logs the shutdown.
   * @param {object} browser - The browser that owns this.page
   *   (presumably a puppeteer Browser — confirm against the caller).
   */
  async finish(browser) {
    await this.page.close();
    await browser.close();
    Logger.info('Closed browser instance.');
  }
|
||||
|
||||
async authenticate(credentials) {
|
||||
await this.page.click('#topHolderBox a[href="/Login"]');
|
||||
await this.page.waitForSelector('#btnSubmit');
|
||||
@ -171,7 +177,7 @@ class KissAsianScraper {
|
||||
if (span.textContent === 'Status:') {
|
||||
const status = span.nextSibling.textContent;
|
||||
|
||||
return status === 'Completed';
|
||||
return status.includes('Completed');
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -10,8 +10,13 @@ class ArgsParser {
|
||||
parse(argv) {
|
||||
const parsedArgs = parseArgs(argv, this.options);
|
||||
this.validate(parsedArgs);
|
||||
|
||||
const { _: [directory], ...rest } = parsedArgs;
|
||||
|
||||
return parsedArgs;
|
||||
return {
|
||||
...rest,
|
||||
directory
|
||||
};
|
||||
}
|
||||
|
||||
validate(parsedArgs) {
|
||||
|
30
src/index.js
30
src/index.js
@ -2,8 +2,10 @@ const puppeteer = require('puppeteer-extra');
|
||||
const StealthPlugin = require('puppeteer-extra-plugin-stealth');
|
||||
const AdBlockerPlugin = require('puppeteer-extra-plugin-adblocker');
|
||||
const ArgsParser = require('./classes/utils/ArgsParser');
|
||||
const MediaLinksFile = require('./classes/filesystem/MediaLinksFile');
|
||||
const KissAsianScraper = require('./classes/scraping/KissAsianScraper');
|
||||
const JDownloaderClient = require('./classes/download/JDownloaderClient');
|
||||
const Utils = require('./classes/utils/Utils');
|
||||
const credentials = require('../data/credentials.json');
|
||||
|
||||
const args = new ArgsParser().parse(process.argv.slice(2));
|
||||
@ -11,10 +13,26 @@ const args = new ArgsParser().parse(process.argv.slice(2));
|
||||
puppeteer.use(StealthPlugin());
|
||||
puppeteer.use(AdBlockerPlugin({ blockTrackers: true }));
|
||||
|
||||
// Full pipeline for a single media page: scrape the media's metadata and
// download URLs, feed them to JDownloader, rename the crawled entries
// after the media, then start the download.
const singleMediaProcedure = async(scraper, downloader, mediaURL) => {
  const media = await scraper.getMediaData(mediaURL);
  await scraper.populateMediaDownloadURLs(media);

  await downloader.addLinks(media);

  const crawledLinks = await downloader.getCrawledLinks(media.downloadURLs);
  const renamedCrawledLinks = downloader.getRenamedCrawledLinks(crawledLinks, media);
  await downloader.renameCrawledLinks(renamedCrawledLinks);

  // NOTE(review): startDownload gets the original crawledLinks, not
  // renamedCrawledLinks — confirm renaming does not change the UUIDs
  // startDownload relies on.
  await downloader.startDownload(crawledLinks);
};
|
||||
|
||||
const main = async() => {
|
||||
const browser = await puppeteer.launch({ headless: !args.display, slowMo: 250 });
|
||||
const page = await browser.newPage();
|
||||
|
||||
const file = new MediaLinksFile(args.directory);
|
||||
const mediaToDownload = await file.read();
|
||||
|
||||
const scraper = new KissAsianScraper(page);
|
||||
const downloader = new JDownloaderClient(credentials.jdownloader);
|
||||
|
||||
@ -22,9 +40,15 @@ const main = async() => {
|
||||
await scraper.load();
|
||||
await scraper.authenticate(credentials.kissasian);
|
||||
|
||||
const media = await scraper.getMediaData('https://kissasian.li/Drama/My-Roommate-is-a-Gumiho');
|
||||
|
||||
await scraper.populateMediaDownloadURLs(media);
|
||||
await Utils.mapSeries(mediaToDownload, async(url) => {
|
||||
await singleMediaProcedure(scraper, downloader, url);
|
||||
await file.removeFirstLink();
|
||||
});
|
||||
|
||||
await file.remove();
|
||||
await downloader.finish();
|
||||
await scraper.finish(browser);
|
||||
process.exit(0);
|
||||
};
|
||||
|
||||
main();
|
||||
|
Loading…
x
Reference in New Issue
Block a user