Merge branch 'master' into anch

pull/365/head
AwkwardPeak7 3 years ago committed by GitHub
commit 5cebc965fd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -2,8 +2,8 @@ import tasks.ReportGenerateTask
plugins { plugins {
id 'java-library' id 'java-library'
id 'org.jetbrains.kotlin.jvm' version '1.9.0' id 'org.jetbrains.kotlin.jvm' version '1.9.20'
id 'com.google.devtools.ksp' version '1.9.0-1.0.13' id 'com.google.devtools.ksp' version '1.9.20-1.0.14'
id 'maven-publish' id 'maven-publish'
} }
@ -54,19 +54,19 @@ afterEvaluate {
dependencies { dependencies {
implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.3' implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.3'
implementation 'com.squareup.okhttp3:okhttp:4.11.0' implementation 'com.squareup.okhttp3:okhttp:4.12.0'
implementation 'com.squareup.okio:okio:3.3.0' implementation 'com.squareup.okio:okio:3.6.0'
api 'org.jsoup:jsoup:1.16.1' api 'org.jsoup:jsoup:1.16.2'
implementation 'org.json:json:20230618' implementation 'org.json:json:20231013'
implementation 'androidx.collection:collection-ktx:1.2.0' implementation 'androidx.collection:collection-ktx:1.3.0'
implementation "com.daveanthonythomas.moshipack:moshipack:1.0.1" implementation "com.daveanthonythomas.moshipack:moshipack:1.0.1"
ksp project(':kotatsu-parsers-ksp') ksp project(':kotatsu-parsers-ksp')
testImplementation 'org.junit.jupiter:junit-jupiter-api:5.9.3' testImplementation 'org.junit.jupiter:junit-jupiter-api:5.10.0'
testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.9.3' testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.10.0'
testImplementation 'org.junit.jupiter:junit-jupiter-params:5.9.3' testImplementation 'org.junit.jupiter:junit-jupiter-params:5.10.0'
testImplementation 'org.jetbrains.kotlinx:kotlinx-coroutines-test:1.7.1' testImplementation 'org.jetbrains.kotlinx:kotlinx-coroutines-test:1.7.3'
testImplementation 'io.webfolder:quickjs:1.1.0' testImplementation 'io.webfolder:quickjs:1.1.0'
} }

@ -3,5 +3,5 @@ plugins {
} }
dependencies { dependencies {
implementation 'com.google.devtools.ksp:symbol-processing-api:1.9.0-1.0.13' implementation 'com.google.devtools.ksp:symbol-processing-api:1.9.20-1.0.14'
} }

@ -0,0 +1,136 @@
package org.koitharu.kotatsu.parsers.site.en
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import java.text.SimpleDateFormat
import java.util.*
/**
 * Parser for BeeToon.net (English).
 *
 * Listing supports [SortOrder.UPDATED] and [SortOrder.POPULARITY]; free-text search uses the
 * site's `?s=` endpoint, which is not paginated (pages past the first return an empty list).
 */
@MangaSourceParser("BEETOON", "BeeToon.net", "en")
internal class BeeToon(context: MangaLoaderContext) :
	PagedMangaParser(context, MangaSource.BEETOON, pageSize = 30) {

	override val sortOrders: Set<SortOrder> = EnumSet.of(SortOrder.UPDATED, SortOrder.POPULARITY)

	override val configKeyDomain = ConfigKey.Domain("ww7.beetoon.net")

	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		// The site's genre listing supports at most one tag at a time.
		val tag = tags.oneOrThrowIfMany()
		val url = buildString {
			append("https://")
			append(domain)
			when {
				!query.isNullOrEmpty() -> {
					// Search results are a single unpaginated page.
					if (page > 1) {
						return emptyList()
					}
					append("/?s=")
					append(query.urlEncoded())
				}
				!tags.isNullOrEmpty() -> {
					append("/genre/")
					append(tag?.key.orEmpty())
					append("/page-")
					append(page)
					append("/")
				}
				else -> {
					when (sortOrder) {
						SortOrder.UPDATED -> append("/latest-update/")
						SortOrder.POPULARITY -> append("/popular-manga/")
						// Unsupported orders fall back to the latest-update listing.
						else -> append("/latest-update/")
					}
					append("page-")
					append(page)
					append("/")
				}
			}
		}
		val doc = webClient.httpGet(url).parseHtml()
		return doc.select(".comics-grid .entry").map { div ->
			val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
			Manga(
				id = generateUid(href),
				url = href,
				publicUrl = href.toAbsoluteUrl(domain),
				coverUrl = div.selectFirst("img")?.src().orEmpty(),
				title = div.selectFirst(".name")?.text().orEmpty(),
				altTitle = null,
				// Site rating is on a 0..5 scale; normalize to 0..1.
				rating = div.selectFirst(".counter")?.text()?.toFloatOrNull()?.div(5f) ?: RATING_UNKNOWN,
				tags = emptySet(),
				author = null,
				state = when (div.selectLastOrThrow(".status span").text()) {
					"Ongoing" -> MangaState.ONGOING
					"Completed" -> MangaState.FINISHED
					else -> null
				},
				source = source,
				isNsfw = isNsfwSource,
			)
		}
	}

	override suspend fun getTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain/").parseHtml()
		// Genres are listed in the "menu-item-3" dropdown; the tag key is the
		// last path segment of each genre link.
		return doc.requireElementById("menu-item-3").select("ul.sub-menu li a").mapNotNullToSet {
			MangaTag(
				key = it.attr("href").removeSuffix('/').substringAfterLast('/'),
				title = it.text(),
				source = source,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
		// Hoisted out of the per-chapter lambda: SimpleDateFormat construction is
		// relatively expensive and the pattern is loop-invariant.
		val dateFormat = SimpleDateFormat("MM/dd/yyyy HH:mm:ss", Locale.ENGLISH)
		return manga.copy(
			description = doc.getElementById("desc")?.text().orEmpty(),
			rating = doc.selectFirst(".counter")?.text()?.toFloatOrNull()?.div(5f) ?: RATING_UNKNOWN,
			tags = doc.body().select(".info .genre a").mapNotNullToSet {
				MangaTag(
					key = it.attr("href").removeSuffix('/').substringAfterLast('/'),
					title = it.text(),
					source = source,
				)
			},
			author = doc.selectFirst(".info .author a")?.text(),
			// Chapters are listed newest-first on the site; reverse so index 0 is chapter 1.
			chapters = doc.select(".items-chapters a").mapChapters(reversed = true) { i, a ->
				val url = a.attrAsRelativeUrl("href").toAbsoluteUrl(domain)
				MangaChapter(
					id = generateUid(url),
					name = a.selectFirstOrThrow(".chap").text(),
					number = i + 1,
					url = url,
					scanlator = null,
					// The date lives in the title attribute; "0" forces tryParse's failure path
					// when the attribute is absent.
					uploadDate = dateFormat.tryParse(a.selectFirst(".chapter-date")?.attr("title") ?: "0"),
					branch = null,
					source = source,
				)
			},
		)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()
		return doc.select(".chapter-content-inner center img").map { img ->
			val urlPage = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(urlPage),
				url = urlPage,
				preview = null,
				source = source,
			)
		}
	}
}

@ -1,6 +1,5 @@
package org.koitharu.kotatsu.parsers.site.en package org.koitharu.kotatsu.parsers.site.en
import kotlinx.coroutines.coroutineScope
import org.koitharu.kotatsu.parsers.MangaLoaderContext import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser import org.koitharu.kotatsu.parsers.PagedMangaParser
@ -77,9 +76,9 @@ class ManhwasMen(context: MangaLoaderContext) :
} }
} }
override suspend fun getDetails(manga: Manga): Manga = coroutineScope { override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml() val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
manga.copy( return manga.copy(
tags = doc.body().select(".genres a").mapNotNullToSet { a -> tags = doc.body().select(".genres a").mapNotNullToSet { a ->
MangaTag( MangaTag(
key = a.attr("href").substringAfterLast('='), key = a.attr("href").substringAfterLast('='),

@ -1,4 +1,4 @@
package org.koitharu.kotatsu.parsers.site.en package org.koitharu.kotatsu.parsers.site.es
import kotlinx.coroutines.coroutineScope import kotlinx.coroutines.coroutineScope
import okhttp3.Headers import okhttp3.Headers
@ -12,13 +12,13 @@ import org.koitharu.kotatsu.parsers.network.UserAgents
import org.koitharu.kotatsu.parsers.util.* import org.koitharu.kotatsu.parsers.util.*
import java.util.* import java.util.*
@MangaSourceParser("TEMPLESCAN", "TempleScan", "en") @MangaSourceParser("TEMPLESCANESP", "TempleScanEsp", "es", ContentType.HENTAI)
internal class TempleScan(context: MangaLoaderContext) : internal class TempleScanEsp(context: MangaLoaderContext) :
PagedMangaParser(context, MangaSource.TEMPLESCAN, pageSize = 15) { PagedMangaParser(context, MangaSource.TEMPLESCANESP, pageSize = 15) {
override val sortOrders: Set<SortOrder> = EnumSet.of(SortOrder.NEWEST, SortOrder.UPDATED) override val sortOrders: Set<SortOrder> = EnumSet.of(SortOrder.NEWEST, SortOrder.UPDATED)
override val configKeyDomain = ConfigKey.Domain("templescan.net") override val configKeyDomain = ConfigKey.Domain("templescanesp.net")
override val headers: Headers = Headers.Builder() override val headers: Headers = Headers.Builder()
.add("User-Agent", UserAgents.CHROME_DESKTOP) .add("User-Agent", UserAgents.CHROME_DESKTOP)
@ -61,7 +61,7 @@ internal class TempleScan(context: MangaLoaderContext) :
author = null, author = null,
state = null, state = null,
source = source, source = source,
isNsfw = false, isNsfw = isNsfwSource,
) )
} }
} }
@ -75,7 +75,6 @@ internal class TempleScan(context: MangaLoaderContext) :
manga.copy( manga.copy(
description = doc.requireElementById("section-sinopsis").html(), description = doc.requireElementById("section-sinopsis").html(),
chapters = chaptersDeferred, chapters = chaptersDeferred,
isNsfw = false,
) )
} }

@ -31,6 +31,8 @@ internal abstract class HeanCms(
.add("User-Agent", UserAgents.CHROME_DESKTOP) .add("User-Agent", UserAgents.CHROME_DESKTOP)
.build() .build()
protected open val pathManga = "series"
//For some sources, you need to send a json. For the moment, this part only works in Get. ( ex source need json gloriousscan.com , omegascans.org ) //For some sources, you need to send a json. For the moment, this part only works in Get. ( ex source need json gloriousscan.com , omegascans.org )
override suspend fun getListPage( override suspend fun getListPage(
page: Int, page: Int,
@ -78,7 +80,7 @@ internal abstract class HeanCms(
val json = webClient.httpGet(url).parseJson() val json = webClient.httpGet(url).parseJson()
return json.getJSONArray("data").mapJSON { j -> return json.getJSONArray("data").mapJSON { j ->
val slug = j.getString("series_slug") val slug = j.getString("series_slug")
val urlManga = "https://$domain/series/$slug" val urlManga = "https://$domain/$pathManga/$slug"
val cover = if (j.getString("thumbnail").contains('/')) { val cover = if (j.getString("thumbnail").contains('/')) {
j.getString("thumbnail") j.getString("thumbnail")
} else { } else {
@ -120,13 +122,13 @@ internal abstract class HeanCms(
.drop(1) .drop(1)
return manga.copy( return manga.copy(
altTitle = root.selectFirstOrThrow("p.text-center.text-gray-400").text(), altTitle = root.selectFirst("p.text-center.text-gray-400")?.text(),
tags = emptySet(), tags = emptySet(),
author = root.select("div.flex.flex-col.gap-y-2 p:contains(Autor:) strong").text(), author = root.select("div.flex.flex-col.gap-y-2 p:contains(Autor:) strong").text(),
description = root.selectFirst("h5:contains(Desc) + .bg-gray-800")?.html(), description = root.selectFirst("h5:contains(Desc) + .bg-gray-800")?.html(),
chapters = chapter.mapChapters(reversed = true) { i, it -> chapters = chapter.mapChapters(reversed = true) { i, it ->
val slugChapter = it.substringAfter("chapter_slug\":\"").substringBefore("\",\"") val slugChapter = it.substringAfter("chapter_slug\":\"").substringBefore("\",\"")
val url = "https://$domain/series/$slug/$slugChapter" val url = "https://$domain/$pathManga/$slug/$slugChapter"
val date = it.substringAfter("created_at\":\"").substringBefore("\",\"").substringBefore("T") val date = it.substringAfter("created_at\":\"").substringBefore("\",\"").substringBefore("T")
val name = slugChapter.replace("-", " ") val name = slugChapter.replace("-", " ")
MangaChapter( MangaChapter(

@ -0,0 +1,12 @@
package org.koitharu.kotatsu.parsers.site.heancms.en
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.model.MangaSource
import org.koitharu.kotatsu.parsers.site.heancms.HeanCms
// TempleScan (EN) — thin parser built on the shared HeanCms template; all list/detail
// logic lives in the base class.
@MangaSourceParser("TEMPLESCAN", "TempleScan", "en")
internal class TempleScan(context: MangaLoaderContext) :
HeanCms(context, MangaSource.TEMPLESCAN, "templescan.net") {
// This deployment serves series under "/comic/" rather than the HeanCms default "/series/".
override val pathManga = "comic"
}

@ -1,15 +0,0 @@
package org.koitharu.kotatsu.parsers.site.madara.es
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.model.ContentType
import org.koitharu.kotatsu.parsers.model.MangaSource
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
@MangaSourceParser("TEMPLESCANESP", "TempleScanEsp", "es", ContentType.HENTAI)
internal class TempleScanEsp(context: MangaLoaderContext) :
MadaraParser(context, MangaSource.TEMPLESCANESP, "templescanesp.com") {
override val listUrl = "series/"
override val tagPrefix = "genero/"
override val datePattern = "dd.MM.yyyy"
}

@ -1,8 +1,13 @@
package org.koitharu.kotatsu.parsers.site.mangareader package org.koitharu.kotatsu.parsers.site.mangareader
import androidx.collection.ArrayMap import androidx.collection.ArrayMap
import kotlinx.coroutines.runBlocking
import kotlinx.coroutines.sync.Mutex import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock import kotlinx.coroutines.sync.withLock
import okhttp3.Cookie
import okhttp3.HttpUrl.Companion.toHttpUrl
import okhttp3.Interceptor
import okhttp3.Response
import org.json.JSONObject import org.json.JSONObject
import org.jsoup.nodes.Document import org.jsoup.nodes.Document
import org.koitharu.kotatsu.parsers.MangaLoaderContext import org.koitharu.kotatsu.parsers.MangaLoaderContext
@ -19,7 +24,7 @@ internal abstract class MangaReaderParser(
domain: String, domain: String,
pageSize: Int, pageSize: Int,
searchPageSize: Int, searchPageSize: Int,
) : PagedMangaParser(context, source, pageSize, searchPageSize) { ) : PagedMangaParser(context, source, pageSize, searchPageSize), Interceptor {
override val configKeyDomain = ConfigKey.Domain(domain) override val configKeyDomain = ConfigKey.Domain(domain)
@ -296,4 +301,23 @@ internal abstract class MangaReaderParser(
tagCache = tagMap tagCache = tagMap
return@withLock tagMap return@withLock tagMap
} }
override fun intercept(chain: Interceptor.Chain): Response {
val response = chain.proceed(chain.request())
if (context.cookieJar.getCookies(domain).none { it.name.contains("NetShield") }) {
val cookie = runBlocking { response.parseHtml().getNetShieldCookie() } ?: return response
context.cookieJar.insertCookie(domain, cookie)
return chain.proceed(response.request.newBuilder().build())
}
return response
}
private suspend fun Document.getNetShieldCookie(): Cookie? {
val script = select("script").firstNotNullOfOrNull { s ->
s.html().takeIf { x -> x.contains("slowAES.decrypt") }
} ?: return null
val min = webClient.httpGet("https://$domain/min.js").parseRaw()
val res = context.evaluateJs(min + "\n\n" + script.replace("document.cookie =", "return"))
return Cookie.parse(baseUri().toHttpUrl(), res ?: return null)
}
} }

@ -1,6 +1,5 @@
package org.koitharu.kotatsu.parsers.site.tr package org.koitharu.kotatsu.parsers.site.tr
import kotlinx.coroutines.coroutineScope
import org.koitharu.kotatsu.parsers.MangaLoaderContext import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser import org.koitharu.kotatsu.parsers.PagedMangaParser
@ -89,9 +88,9 @@ class TrWebtoon(context: MangaLoaderContext) :
} }
} }
override suspend fun getDetails(manga: Manga): Manga = coroutineScope { override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml() val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
manga.copy( return manga.copy(
tags = doc.body().select("li.movie__year a").mapNotNullToSet { a -> tags = doc.body().select("li.movie__year a").mapNotNullToSet { a ->
MangaTag( MangaTag(
key = a.attr("href").substringAfterLast('='), key = a.attr("href").substringAfterLast('='),

Loading…
Cancel
Save