Add AComics, ReaperComics (en)
parent eeafb3c6fd
commit c88d7de138
@@ -0,0 +1,148 @@
package org.koitharu.kotatsu.parsers.site.en

import okhttp3.Headers
import org.jsoup.nodes.Document
import org.koitharu.kotatsu.parsers.ErrorMessages
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.network.UserAgents
import org.koitharu.kotatsu.parsers.util.*
import java.text.DateFormat
import java.text.SimpleDateFormat
import java.util.*

@MangaSourceParser("REAPERCOMICS", "ReaperComics", "en")
internal class ReaperComics(context: MangaLoaderContext) :
    PagedMangaParser(context, MangaSource.REAPERCOMICS, pageSize = 30) {

    override val availableSortOrders: Set<SortOrder> = EnumSet.of(SortOrder.UPDATED, SortOrder.ALPHABETICAL)

    override val configKeyDomain = ConfigKey.Domain("reapercomics.com")

    override val headers: Headers = Headers.Builder()
        .add("User-Agent", UserAgents.CHROME_DESKTOP)
        .build()

    override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
        val url = buildString {
            append("https://")
            append(domain)
            when (filter) {
                is MangaListFilter.Search -> {
                    throw IllegalArgumentException(ErrorMessages.SEARCH_NOT_SUPPORTED) // TODO
                }

                is MangaListFilter.Advanced -> {
                    append("/")
                    if (filter.sortOrder == SortOrder.UPDATED) {
                        append("latest/")
                    }
                    append("comics?page=")
                    append(page.toString())
                }

                null -> {
                    append("/latest/comics?page=")
                    append(page.toString())
                }
            }
        }

        return parseMangaList(webClient.httpGet(url).parseHtml())
    }

    private fun parseMangaList(docs: Document): List<Manga> {
        return docs.select("main div.relative, main li.col-span-1").map {
            val a = it.selectFirstOrThrow("a")
            val url = a.attrAsAbsoluteUrl("href")
            Manga(
                id = generateUid(url),
                url = url,
                title = (it.selectFirst("p a") ?: it.selectLast("a"))?.text().orEmpty(),
                altTitle = null,
                publicUrl = url,
                rating = RATING_UNKNOWN,
                isNsfw = isNsfwSource,
                coverUrl = it.selectFirstOrThrow("img").src().orEmpty(),
                tags = emptySet(),
                state = null,
                author = null,
                source = source,
            )
        }
    }

    override suspend fun getAvailableTags(): Set<MangaTag> = emptySet()

    override suspend fun getDetails(manga: Manga): Manga {
        val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
        val simpleDateFormat = SimpleDateFormat("dd/MM/yyyy", sourceLocale)
        return manga.copy(
            description = doc.selectFirst("div.p-4 p.prose")?.html(),
            state = when (doc.selectFirst("dl.mt-2 div:contains(Status) dd")?.text()?.lowercase()) {
                "ongoing" -> MangaState.ONGOING
                "complete" -> MangaState.FINISHED
                else -> null
            },
            chapters = doc.select("div.p-2 div.pb-4 ul li").mapChapters(reversed = true) { i, li ->
                val a = li.selectFirstOrThrow("a")
                val chapterUrl = a.attrAsAbsoluteUrl("href").toRelativeUrl(domain)
                MangaChapter(
                    id = generateUid(chapterUrl),
                    name = li.selectFirst("div.truncate p.truncate")?.text().orEmpty(),
                    number = i + 1,
                    url = chapterUrl,
                    scanlator = null,
                    uploadDate = parseChapterDate(
                        simpleDateFormat,
                        li.selectFirst("div.truncate div.items-center")?.text(),
                    ),
                    branch = null,
                    source = source,
                )
            },
        )
    }

    private fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
        val d = date?.lowercase() ?: return 0
        return when {
            d.endsWith(" ago") -> parseRelativeDate(date)
            else -> dateFormat.tryParse(date)
        }
    }

    private fun parseRelativeDate(date: String): Long {
        val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
        val cal = Calendar.getInstance()
        return when {
            WordSet("second").anyWordIn(date) -> cal.apply { add(Calendar.SECOND, -number) }.timeInMillis
            WordSet("minute", "minutes").anyWordIn(date) -> cal.apply { add(Calendar.MINUTE, -number) }.timeInMillis
            WordSet("hour", "hours").anyWordIn(date) -> cal.apply { add(Calendar.HOUR, -number) }.timeInMillis
            WordSet("day", "days").anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis
            WordSet("week", "weeks").anyWordIn(date) -> cal.apply { add(Calendar.WEEK_OF_YEAR, -number) }.timeInMillis
            WordSet("month", "months").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
            WordSet("year").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
            else -> 0
        }
    }

    override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
        val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()
        return doc.select("img.max-w-full").map { img ->
            val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
            MangaPage(
                id = generateUid(url),
                url = url,
                preview = null,
                source = source,
            )
        }
    }
}
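As a side note on the relative-date handling above: here is a minimal standalone sketch of the same Calendar arithmetic that parseRelativeDate applies to strings such as "3 days ago", written without the parser's WordSet/anyWordIn helpers. The sample input string is invented for illustration and is not part of the diff.

// Standalone sketch only; mirrors the subtraction logic of parseRelativeDate.
import java.util.Calendar

fun main() {
    val date = "3 days ago" // hypothetical sample input
    val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: 0
    val cal = Calendar.getInstance()
    val millis = when {
        "day" in date -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis
        "hour" in date -> cal.apply { add(Calendar.HOUR, -number) }.timeInMillis
        "week" in date -> cal.apply { add(Calendar.WEEK_OF_YEAR, -number) }.timeInMillis
        else -> 0L
    }
    println(millis) // epoch millis roughly three days in the past
}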
@@ -0,0 +1,174 @@
package org.koitharu.kotatsu.parsers.site.ru

import androidx.collection.ArrayMap
import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock
import org.jsoup.nodes.Document
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import java.util.*

@MangaSourceParser("ACOMICS", "AComics", "ru", ContentType.COMICS)
internal class AComics(context: MangaLoaderContext) :
    PagedMangaParser(context, MangaSource.ACOMICS, pageSize = 10) {

    override val availableSortOrders: Set<SortOrder> =
        EnumSet.of(SortOrder.UPDATED, SortOrder.ALPHABETICAL, SortOrder.POPULARITY)

    override val availableStates: Set<MangaState> = EnumSet.of(MangaState.ONGOING, MangaState.FINISHED)

    override val configKeyDomain = ConfigKey.Domain("acomics.ru")

    init {
        paginator.firstPage = 0
        searchPaginator.firstPage = 0
        context.cookieJar.insertCookies(domain, "ageRestrict=18")
    }

    override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
        val url = buildString {
            append("https://")
            append(domain)
            when (filter) {
                is MangaListFilter.Search -> {
                    if (page > 0) {
                        return emptyList()
                    }
                    append("/search?keyword=")
                    append(filter.query)
                }

                is MangaListFilter.Advanced -> {
                    append("/comics?ratings[]=1&ratings[]=2&ratings[]=3&ratings[]=4&ratings[]=5&ratings[]=6&skip=")
                    append((page * 10).toString())
                    append("&sort=")
                    append(
                        when (filter.sortOrder) {
                            SortOrder.UPDATED -> "last_update"
                            SortOrder.ALPHABETICAL -> "serial_name"
                            SortOrder.POPULARITY -> "subscr_count"
                            else -> "last_update"
                        },
                    )

                    if (filter.tags.isNotEmpty()) {
                        append("&categories=")
                        append(filter.tags.joinToString(separator = ",") { it.key })
                    }

                    if (filter.states.isNotEmpty()) {
                        append("&updatable=")
                        append(
                            filter.states.oneOrThrowIfMany().let {
                                when (it) {
                                    MangaState.ONGOING -> "yes"
                                    MangaState.FINISHED -> "no"
                                    else -> "0"
                                }
                            },
                        )
                    }
                }

                null -> {
                    append("/comics?ratings[]=1&ratings[]=2&ratings[]=3&ratings[]=4&ratings[]=5&ratings[]=6&sort=last_update&skip=")
                    append((page * 20).toString())
                }
            }
        }

        return parseMangaList(webClient.httpGet(url).parseHtml())
    }

    private fun parseMangaList(docs: Document): List<Manga> {
        return docs.select("table.list-loadable").map {
            val a = it.selectFirstOrThrow("a")
            val url = a.attrAsAbsoluteUrl("href") + "/about"
            Manga(
                id = generateUid(url),
                url = url,
                title = it.selectFirstOrThrow(".title").text(),
                altTitle = null,
                publicUrl = url,
                rating = RATING_UNKNOWN,
                isNsfw = isNsfwSource,
                coverUrl = it.selectFirstOrThrow("img").src().orEmpty(),
                tags = emptySet(),
                state = null,
                author = null,
                source = source,
            )
        }
    }

    private var tagCache: ArrayMap<String, MangaTag>? = null
    private val mutex = Mutex()

    override suspend fun getAvailableTags(): Set<MangaTag> {
        return getOrCreateTagMap().values.toSet()
    }

    private suspend fun getOrCreateTagMap(): Map<String, MangaTag> = mutex.withLock {
        tagCache?.let { return@withLock it }
        val tagMap = ArrayMap<String, MangaTag>()
        val tagElements =
            webClient.httpGet("https://$domain/comics").parseHtml().requireElementById("catalog").select(" a.button")
        for (el in tagElements) {
            val name = el.html().substringAfterLast("</span>")
            if (name.isEmpty()) continue
            tagMap[name] = MangaTag(
                title = name,
                key = el.attr("onclick").substringAfterLast("('").substringBefore("')"),
                source = source,
            )
        }
        tagCache = tagMap
        return@withLock tagMap
    }

    override suspend fun getDetails(manga: Manga): Manga {
        val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
        val tagMap = getOrCreateTagMap()
        val tags = doc.select("p.serial-about-badges .category").mapNotNullToSet { tagMap[it.text()] }
        return manga.copy(
            tags = tags,
            description = doc.selectFirst("section.serial-about-text p")?.text(),
            author = doc.selectFirst("p:contains(Автор оригинала:)")?.text()?.replace("Автор оригинала: ", ""),
            chapters = listOf(
                MangaChapter(
                    id = manga.id,
                    name = manga.title,
                    number = 1,
                    url = manga.url.replace("/about", "/"),
                    scanlator = null,
                    uploadDate = 0,
                    branch = null,
                    source = source,
                ),
            ),
        )
    }

    override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
        val doc = webClient.httpGet(chapter.url + "1").parseHtml()
        val totalPages = doc.selectFirstOrThrow("span.issueNumber").text().substringAfterLast('/').toInt()
        return (1..totalPages).map {
            val url = chapter.url + it
            MangaPage(
                id = generateUid(url),
                url = url,
                preview = null,
                source = source,
            )
        }
    }

    override suspend fun getPageUrl(page: MangaPage): String {
        val doc = webClient.httpGet(page.url.toAbsoluteUrl(domain)).parseHtml()
        return doc.requireElementById("mainImage").src() ?: doc.parseFailed("Image src not found")
    }
}
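A quick standalone illustration of the onclick-to-key extraction used in getOrCreateTagMap above: assuming a catalog button carries an onclick value shaped like the hypothetical sample below (the real markup on acomics.ru may differ), the chained substring calls pull out the category id that becomes the tag key.

// Standalone sketch only; the onclick value is a made-up example, not from the site.
fun main() {
    val onclick = "filterCategory('5')" // hypothetical attribute value
    val key = onclick.substringAfterLast("('").substringBefore("')")
    println(key) // prints: 5
}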