Merge pull request #220 from davvarrr/master

Fix some sources and add new sources
Koitharu 3 years ago committed by GitHub
commit 42cc0430f8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -35,13 +35,30 @@ internal class BatoToParser(context: MangaLoaderContext) : PagedMangaParser(
// Mirror domains for Bato.to; the first entry is the default used for new installs.
// Kept alphabetical (after the default) and de-duplicated — the previous list
// contained repeated entries ("battwo.com", "batotoo.com", "mto.to", "hto.to",
// "mangatoto.com" each appeared twice).
override val configKeyDomain = ConfigKey.Domain(
	"bato.to",
	"batocomic.com",
	"batocomic.net",
	"batocomic.org",
	"batotoo.com",
	"batotwo.com",
	"battwo.com",
	"comiko.net",
	"comiko.org",
	"dto.to",
	"hto.to",
	"mangatoto.com",
	"mangatoto.net",
	"mangatoto.org",
	"mto.to",
	"readtoto.com",
	"readtoto.net",
	"readtoto.org",
	"wto.to",
	"xbato.com",
	"xbato.net",
	"xbato.org",
	"zbato.com",
	"zbato.net",
	"zbato.org",
)
override suspend fun getListPage(

@ -25,8 +25,11 @@ class TuMangaOnlineParser(context: MangaLoaderContext) : PagedMangaParser(
private val chapterDateFormat = SimpleDateFormat("yyyy-MM-dd", sourceLocale)
override val sortOrders = EnumSet.of(
SortOrder.ALPHABETICAL,
SortOrder.UPDATED,
SortOrder.NEWEST,
SortOrder.POPULARITY,
SortOrder.RATING
)
override suspend fun getListPage(
@ -35,16 +38,21 @@ class TuMangaOnlineParser(context: MangaLoaderContext) : PagedMangaParser(
tags: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> {
val order =
when (sortOrder) {
SortOrder.POPULARITY -> "likes_count"
SortOrder.UPDATED -> "release_date"
SortOrder.NEWEST -> "creation"
SortOrder.ALPHABETICAL -> "alphabetically"
SortOrder.RATING -> "score"
}
val url = buildString {
append("/library")
if (query.isNullOrEmpty()) {
append("?order_item=")
if (sortOrder == SortOrder.POPULARITY) {
append("likes_count")
}
if (sortOrder == SortOrder.NEWEST) {
append("creation")
}
append(order)
append("&order_dir=desc")
append("&filter_by=title")
if (tags != null) {

@ -0,0 +1,215 @@
package org.koitharu.kotatsu.parsers.site.fr
import okhttp3.Headers
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.exception.ParseException
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.network.UserAgents
import org.koitharu.kotatsu.parsers.util.*
import java.text.DateFormat
import java.text.SimpleDateFormat
import java.util.Calendar
import java.util.EnumSet
import java.util.Locale
/**
 * Parser for Lugnica Scans (https://lugnica-scans.com), a French scanlation site.
 *
 * Listing works in two modes only: the full A-Z catalog page (which has no
 * pagination) and a "latest updates" endpoint whose response is parsed as HTML.
 * The site has no search, so [getListPage]'s query/tags are ignored.
 */
@MangaSourceParser("LUGNICASCANS", "Lugnica Scans", "fr")
internal class LugnicaScans(context: MangaLoaderContext) : PagedMangaParser(context, MangaSource.LUGNICASCANS, 10) {

	// Only the A-Z catalog and the latest-updates feed are available.
	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.ALPHABETICAL,
		SortOrder.UPDATED,
	)

	override val configKeyDomain = ConfigKey.Domain("lugnica-scans.com")

	// Send a desktop Chrome User-Agent with every request.
	override val headers: Headers = Headers.Builder()
		.add("User-Agent", UserAgents.CHROME_DESKTOP)
		.build()

	init {
		// Preset the reader_render cookie. Presumably this selects the reader
		// mode whose markup getPages() scrapes — TODO confirm against the site.
		context.cookieJar.insertCookies(
			domain,
			"reader_render=continue;",
		)
	}

	/** Returns the site's 32x32 favicon. */
	override suspend fun getFavicons(): Favicons {
		return Favicons(
			listOf(
				Favicon("https://$domain/favicon/favicon-32x32.png", 32, null),
			),
			domain,
		)
	}

	/**
	 * Loads one page of the list.
	 *
	 * ALPHABETICAL uses the unpaginated /mangas/ catalog (only page 1 is valid);
	 * UPDATED uses the paginated /api/manga/home/getlast/<page> endpoint.
	 * [query] and [tags] are ignored — the site offers no search.
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		val url = buildString {
			append("https://")
			append(domain)
			if (sortOrder == SortOrder.ALPHABETICAL) {
				append("/mangas/")
				// The catalog page contains every manga and has no page function,
				// so pagination must be stopped after page 1. Appending the page
				// number deliberately breaks the URL for page 2 to end the listing.
				// TODO: replace with a cleaner stop condition if one exists.
				if (page == 2) {
					append(page.toString()) // intentionally malformed to stop pagination
				}
			}
			if (sortOrder == SortOrder.UPDATED) {
				append("/api/manga/home/getlast/")
				append(page.toString())
			}
		}
		val doc = webClient.httpGet(url).parseHtml()
		if (sortOrder == SortOrder.UPDATED) {
			// "Latest updates" feed: one .last_chapters-element per entry.
			return doc.select(".last_chapters-element")
				.map { div ->
					val a = div.selectFirstOrThrow("a.last_chapters-title")
					val href = a.attrAsAbsoluteUrl("href")
					Manga(
						id = generateUid(href),
						title = a.text(),
						altTitle = null,
						url = href,
						publicUrl = href.toAbsoluteUrl(domain),
						// Raw score divided by 5 (the site appears to rate out of 5);
						// -1 when absent or unparsable.
						rating = div.selectFirstOrThrow(".last_chapters-rate").ownText().toFloatOrNull()?.div(5f)
							?: -1f,
						isNsfw = false,
						coverUrl = div.selectFirstOrThrow(".last_chapters-image img").attrAsAbsoluteUrl("src"),
						tags = setOf(),
						state = null,
						author = null,
						source = source,
					)
				}
		} else {
			// Full A-Z catalog: entries live under .catalog > div.element.
			val root = doc.selectFirstOrThrow(".catalog")
			return root.select("div.element")
				.map { div ->
					val href = div.selectFirstOrThrow("a").attrAsAbsoluteUrl("href")
					Manga(
						id = generateUid(href),
						title = div.select("a.title").text(),
						altTitle = null,
						url = href,
						publicUrl = href.toAbsoluteUrl(domain),
						rating = div.selectFirstOrThrow("div.stats").lastElementChild()?.ownText()?.toFloatOrNull()
							?.div(5f) ?: -1f,
						isNsfw = false,
						coverUrl = div.selectFirstOrThrow("img").attrAsAbsoluteUrl("src"),
						tags = setOf(),
						state = null,
						author = null,
						source = source,
					)
				}
		}
	}

	/**
	 * Fills in state, tags, author, description and the chapter list
	 * from the manga's detail page.
	 */
	override suspend fun getDetails(manga: Manga): Manga {
		val root = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
		val dateFormat = SimpleDateFormat("dd-MM-yyyy", Locale.FRANCE)
		return manga.copy(
			altTitle = null,
			// The 4th .manga-tags block carries the publication status (French labels).
			state = when (root.select("div.manga-tags")[3].select("a").text()) {
				"En Cours" -> MangaState.ONGOING
				"Fini", "Abandonné", "Licencier" -> MangaState.FINISHED
				else -> null
			},
			// Lists the tags, but there is no tag search on the site, so selecting
			// one just falls back to the A-Z or latest list.
			tags = root.select("div.manga-tags")[1].select("a").mapNotNullToSet { a ->
				MangaTag(
					key = a.text(),
					title = a.text().toTitleCase(),
					source = source,
				)
			},
			author = root.select("div.manga-staff").text(),
			description = root.selectFirst("div.manga-description div")?.text(),
			chapters = root.select("div.manga-chapters_wrapper div.manga-chapter")
				.mapChapters(reversed = true) { i, div ->
					val a = div.selectFirstOrThrow("a")
					val href = a.attrAsRelativeUrl("href")
					val name = a.text()
					// The last <span> holds either an absolute date or a relative
					// one ("il y a ..."); parseChapterDate handles both.
					val dateText = div.select("span").last()?.text()
					MangaChapter(
						id = generateUid(href),
						name = name,
						number = i,
						url = href,
						scanlator = null,
						uploadDate = parseChapterDate(
							dateFormat,
							dateText,
						),
						branch = null,
						source = source,
					)
				},
		)
	}

	/** Collects page images from the reader; prefers the lazy-load "data-src" over "src". */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		// NOTE(review): if requireElementById throws on a missing element (as its
		// name suggests), the elvis fallback below is dead code — confirm.
		val root = doc.body().requireElementById("forgen_reader")
			?: throw ParseException("Root not found", fullUrl)
		return root.select("img").map { img ->
			val url = img.attrAsRelativeUrlOrNull("data-src") ?: img.attrAsRelativeUrlOrNull("src")
				?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}

	// No usable tag filter on the site, so no tags are reported.
	override suspend fun getTags(): Set<MangaTag> = emptySet()

	/**
	 * Parses a chapter date that is either relative French text ("il y a ...")
	 * or an absolute date matching [dateFormat]; returns 0 when [date] is null.
	 */
	protected fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
		val d = date?.lowercase() ?: return 0
		return when {
			d.startsWith("il y a") -> parseRelativeDate(date)
			else -> dateFormat.tryParse(date)
		}
	}

	/**
	 * Converts a French relative date ("il y a 3 jours") to epoch millis by
	 * subtracting the extracted amount from the current time; 0 when no known
	 * time unit is found.
	 */
	private fun parseRelativeDate(date: String): Long {
		val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
		val cal = Calendar.getInstance()
		return when {
			WordSet("jour", "jours").anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis
			WordSet("heure", "heures").anyWordIn(date) -> cal.apply { add(Calendar.HOUR, -number) }.timeInMillis
			WordSet("minute", "minutes").anyWordIn(date) -> cal.apply { add(Calendar.MINUTE, -number) }.timeInMillis
			WordSet("seconde", "secondes").anyWordIn(date) -> cal.apply { add(Calendar.SECOND, -number) }.timeInMillis
			WordSet("mois").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
			WordSet("année", "années").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
			WordSet("semaine", "semaines").anyWordIn(date) -> cal.apply {
				add(
					Calendar.WEEK_OF_MONTH,
					-number,
				)
			}.timeInMillis
			else -> 0
		}
	}
}

@ -29,7 +29,7 @@ import org.koitharu.kotatsu.parsers.util.urlEncoded
import java.text.SimpleDateFormat
import java.util.EnumSet
@MangaSourceParser("ISEKAISCAN", "Isekai Scan", "en")
@MangaSourceParser("ISEKAISCAN", "Isekai Scan Top", "en")
internal class IsekaiScan(context: MangaLoaderContext) :
MadaraParser(context, MangaSource.ISEKAISCAN, "isekaiscan.top", 16) {

@ -21,7 +21,7 @@ import org.koitharu.kotatsu.parsers.util.toAbsoluteUrl
import org.koitharu.kotatsu.parsers.util.toTitleCase
import org.koitharu.kotatsu.parsers.util.urlEncoded
@MangaSourceParser("ISEKAISCAN_EU", "IsekaiScan", "en")
@MangaSourceParser("ISEKAISCAN_EU", "Isekai Scan To", "en")
internal class IsekaiScanEuParser(context: MangaLoaderContext) :
MadaraParser(context, MangaSource.ISEKAISCAN_EU, "m.isekaiscan.to") {

@ -0,0 +1,15 @@
package org.koitharu.kotatsu.parsers.site.madara.id
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.model.MangaSource
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
import java.util.Locale
/**
 * Parser for Immortal Updates Id (immortalupdates.id), an Indonesian
 * Madara-based site. All scraping logic is inherited from [MadaraParser];
 * only the date handling is customized.
 */
@MangaSourceParser("IMMORTALUPDATESID", "Immortal Updates Id", "id")
internal class ImmortalUpdatesId(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.IMMORTALUPDATESID, "immortalupdates.id") {

	// e.g. "5 January 2023"
	override val datePattern = "d MMMM yyyy"

	// Date strings are parsed with an English locale even though the site is Indonesian.
	override val sourceLocale: Locale = Locale.ENGLISH
}

@ -55,10 +55,11 @@ internal abstract class MangaReaderParser(
private val mutex = Mutex()
private var lastSearchPage = 1
protected open val selectChapter = "#chapterlist > ul > li"
override suspend fun getDetails(manga: Manga): Manga {
val docs = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
val chapters = docs.select("#chapterlist > ul > li").mapChapters(reversed = true) { index, element ->
val chapters = docs.select(selectChapter).mapChapters(reversed = true) { index, element ->
val url = element.selectFirst("a")?.attrAsRelativeUrl("href") ?: return@mapChapters null
MangaChapter(
id = generateUid(url),
@ -99,7 +100,7 @@ internal abstract class MangaReaderParser(
?: tablemode.selectFirst(".infotable td:contains(حالة العمل)")
?: tablemode.selectFirst(".infotable td:contains(الحالة)")
?: tablemode.selectFirst(".infotable td:contains(Estado)")
?: docs.selectFirst(".infotable td:contains(สถานะ)")
?: tablemode.selectFirst(".infotable td:contains(สถานะ)")
?: tablemode.selectFirst(".infotable td:contains(Stato )")
?: tablemode.selectFirst(".infotable td:contains(Durum)")
?: tablemode.selectFirst(".infotable td:contains(Statüsü)")
@ -210,10 +211,11 @@ internal abstract class MangaReaderParser(
return parseMangaList(webClient.httpGet(url).parseHtml())
}
protected open val selectMangaliste = ".postbody .listupd .bs .bsx"
protected open val selectMangalist = ".postbody .listupd .bs .bsx"
protected open val selectMangaListImg = "img.ts-post-image"
protected open fun parseMangaList(docs: Document): List<Manga> {
return docs.select(selectMangaliste).mapNotNull {
return docs.select(selectMangalist).mapNotNull {
val a = it.selectFirst("a") ?: return@mapNotNull null
val relativeUrl = a.attrAsRelativeUrl("href")
val rating = it.selectFirst(".numscore")?.text()
@ -227,7 +229,7 @@ internal abstract class MangaReaderParser(
publicUrl = a.attrAsAbsoluteUrl("href"),
rating = rating,
isNsfw = isNsfwSource,
coverUrl = it.selectFirst("img.ts-post-image")?.imageUrl().orEmpty(),
coverUrl = it.selectFirst(selectMangaListImg)?.imageUrl().orEmpty(),
tags = emptySet(),
state = null,
author = null,
@ -239,13 +241,14 @@ internal abstract class MangaReaderParser(
protected open val encodedSrc = false
protected open val selectScript = "div.wrapper script"
protected open val selectPage = "div#readerarea img"
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val chapterUrl = chapter.url.toAbsoluteUrl(domain)
val docs = webClient.httpGet(chapterUrl).parseHtml()
val test = docs.select("script:containsData(ts_reader)")
if (test.isNullOrEmpty() and !encodedSrc) {
return docs.select("div#readerarea img").map { img ->
return docs.select(selectPage).map { img ->
val url = img.imageUrl()
MangaPage(
id = generateUid(url),

@ -8,6 +8,7 @@ import org.koitharu.kotatsu.parsers.model.MangaChapter
import org.koitharu.kotatsu.parsers.model.MangaSource
import org.koitharu.kotatsu.parsers.model.MangaState
import org.koitharu.kotatsu.parsers.model.MangaTag
import org.koitharu.kotatsu.parsers.model.SortOrder
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
import org.koitharu.kotatsu.parsers.util.attrAsRelativeUrl
import org.koitharu.kotatsu.parsers.util.domain
@ -18,13 +19,71 @@ import org.koitharu.kotatsu.parsers.util.parseHtml
import org.koitharu.kotatsu.parsers.util.toAbsoluteUrl
import org.koitharu.kotatsu.parsers.util.toTitleCase
import org.koitharu.kotatsu.parsers.util.tryParse
import org.koitharu.kotatsu.parsers.util.urlEncoded
import java.text.SimpleDateFormat
@MangaSourceParser("SWATEAM", "Swa Team", "ar")
internal class SwaTeam(context: MangaLoaderContext) :
MangaReaderParser(context, MangaSource.SWATEAM, "swatop.club", pageSize = 42, searchPageSize = 39) {
override val datePattern = "dd-MM-yyyy"
override val datePattern = "MMMM dd, yyyy"
override val selectMangalist = ".listupd .bs .bsx"
override val selectMangaListImg = "img"
private var lastSearchPage = 1
// Tag doesn't work on manga page ( it comes from website )
override suspend fun getListPage(
page: Int,
query: String?,
tags: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> {
if (!query.isNullOrEmpty()) {
if (page > lastSearchPage) {
return emptyList()
}
val url = buildString {
append("https://")
append(domain)
append("/?s=")
append(query.urlEncoded())
append("&page=")
append(page)
}
val docs = webClient.httpGet(url).parseHtml()
lastSearchPage = docs.selectFirst(".pagination .next")
?.previousElementSibling()
?.text()?.toIntOrNull() ?: 1
return parseMangaList(docs)
}
val sortQuery = when (sortOrder) {
SortOrder.ALPHABETICAL -> "title"
SortOrder.NEWEST -> "latest"
SortOrder.POPULARITY -> "popular"
SortOrder.UPDATED -> "update"
else -> ""
}
val tagKey = "genre[]".urlEncoded()
val tagQuery =
if (tags.isNullOrEmpty()) "" else tags.joinToString(separator = "&", prefix = "&") { "$tagKey=${it.key}" }
val url = buildString {
append("https://")
append(domain)
append(listUrl)
append("/?order=")
append(sortQuery)
append(tagQuery)
append("&page=")
append(page)
}
return parseMangaList(webClient.httpGet(url).parseHtml())
}
override suspend fun getDetails(manga: Manga): Manga {
val docs = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
@ -37,7 +96,7 @@ internal class SwaTeam(context: MangaLoaderContext) :
url = url,
number = index + 1,
scanlator = null,
uploadDate = dateFormat.tryParse(element.selectFirst(".chapterdate")?.text()),
uploadDate = dateFormat.tryParse(element.selectFirst(".chapter-date")?.text()),
branch = null,
source = source,
)
@ -52,22 +111,20 @@ internal class SwaTeam(context: MangaLoaderContext) :
val states = docs.selectFirst("div.spe span:contains(Ongoing)")?.text()
val state = if (states.isNullOrEmpty()) {
"Completed"
"completed"
} else {
"Ongoing"
"ongoing"
}
val mangaState = state.let {
when (it) {
"Ongoing" -> MangaState.ONGOING
"ongoing" -> MangaState.ONGOING
"Completed" -> MangaState.FINISHED
"completed" -> MangaState.FINISHED
else -> null
}
}
val author = docs.selectFirst("span.author i")?.text()
val nsfw = docs.selectFirst(".restrictcontainer") != null

@ -21,7 +21,7 @@ internal class BabelToon(context: MangaLoaderContext) :
MangaReaderParser(context, MangaSource.BABELTOON, "babeltoon.com", pageSize = 20, searchPageSize = 10) {
override val listUrl = "/series"
override val selectMangaliste = ".postbody .listupd .maindet .inmain"
override val selectMangalist = ".postbody .listupd .maindet .inmain"
override suspend fun getDetails(manga: Manga): Manga {
val docs = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()

@ -0,0 +1,70 @@
package org.koitharu.kotatsu.parsers.site.mangareader.en
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.model.Manga
import org.koitharu.kotatsu.parsers.model.MangaSource
import org.koitharu.kotatsu.parsers.model.MangaTag
import org.koitharu.kotatsu.parsers.model.SortOrder
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
import org.koitharu.kotatsu.parsers.util.domain
import org.koitharu.kotatsu.parsers.util.parseHtml
import org.koitharu.kotatsu.parsers.util.urlEncoded
import java.util.EnumSet
/**
 * Parser for Zahard (zahard.xyz), an English MangaReader-based site.
 *
 * Overrides the list path and a few CSS selectors; only the "newest"
 * ordering is supported, and search falls back to the site's ?search= form.
 */
@MangaSourceParser("ZAHARD", "Zahard", "en")
internal class Zahard(context: MangaLoaderContext) :
	MangaReaderParser(context, MangaSource.ZAHARD, "zahard.xyz", pageSize = 20, searchPageSize = 30) {

	override val listUrl = "/library"
	override val selectChapter = "#chapterlist > ul > a"
	override val selectPage = "div#chapter_imgs img"

	override val sortOrders: Set<SortOrder>
		get() = EnumSet.of(SortOrder.NEWEST)

	// Highest result-page number seen in the last search; used to stop paging.
	private var lastSearchPage = 1

	/**
	 * Loads one page of the library, either as a plain (optionally tag-filtered)
	 * listing or via the site's search form when [query] is non-empty.
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		if (query.isNullOrEmpty()) {
			// Plain listing, optionally narrowed by tags.
			val tagKey = "tag".urlEncoded()
			val tagQuery =
				if (tags.isNullOrEmpty()) "" else tags.joinToString(separator = "&", prefix = "&") { "$tagKey=${it.key}" }
			val listingUrl = buildString {
				append("https://")
				append(domain)
				append(listUrl)
				append("?page=")
				append(page)
				append(tagQuery)
			}
			return parseMangaList(webClient.httpGet(listingUrl).parseHtml())
		}
		// Search mode: stop once we run past the last known result page.
		if (page > lastSearchPage) {
			return emptyList()
		}
		val searchUrl = buildString {
			append("https://")
			append(domain)
			append(listUrl)
			append("?search=")
			append(query.urlEncoded())
			append("&page=")
			append(page)
		}
		val document = webClient.httpGet(searchUrl).parseHtml()
		// The element just before the "next" link carries the last page number.
		lastSearchPage = document.selectFirst("a[rel=next]")
			?.previousElementSibling()
			?.text()
			?.toIntOrNull() ?: 1
		return parseMangaList(document)
	}
}

@ -0,0 +1,14 @@
package org.koitharu.kotatsu.parsers.site.mangareader.id
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.model.MangaSource
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
/**
 * Parser for Mangakyo (mangakyo.org), an Indonesian MangaReader-based site.
 * All scraping logic is inherited from [MangaReaderParser]; only the list
 * path and date pattern differ.
 */
@MangaSourceParser("MANGAKYO", "Mangakyo", "id")
internal class Mangakyo(context: MangaLoaderContext) :
	MangaReaderParser(context, MangaSource.MANGAKYO, "mangakyo.org", pageSize = 40, searchPageSize = 20) {

	// The catalog lives under /komik instead of the MangaReader default.
	override val listUrl = "/komik"

	// e.g. "Jan 5, 2023"
	override val datePattern = "MMM d, yyyy"
}

@ -0,0 +1,166 @@
package org.koitharu.kotatsu.parsers.site.vi
import okhttp3.Headers
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.network.UserAgents
import org.koitharu.kotatsu.parsers.util.*
import java.text.SimpleDateFormat
import java.util.EnumSet
import java.util.Locale
/**
 * Parser for Lx Manga (lxmanga.net), a Vietnamese manga site.
 * Every entry from this source is flagged NSFW.
 */
@MangaSourceParser("LXMANGA", "Lx Manga", "vi")
internal class LxManga(context: MangaLoaderContext) : PagedMangaParser(context, MangaSource.LXMANGA, 60) {

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.ALPHABETICAL,
		SortOrder.UPDATED,
		SortOrder.NEWEST,
		SortOrder.POPULARITY,
	)

	override val configKeyDomain = ConfigKey.Domain("lxmanga.net")

	// Use a desktop Chrome User-Agent for all requests.
	override val headers: Headers = Headers.Builder()
		.add("User-Agent", UserAgents.CHROME_DESKTOP)
		.build()

	/**
	 * Loads one page of the catalog. Search (/tim-kiem), tag filtering
	 * (/the-loai/<key>) and the plain list (/danh-sach) use different paths;
	 * the page and sort parameters are appended to all of them.
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		val url = buildString {
			append("https://")
			append(domain)
			// Tracks whether the path segment already opened the query string so
			// the page/sort parameters below join with the correct separator.
			// (Previously "?page=" was always appended, which produced a second
			// '?' after the search query string.)
			var hasQueryString = false
			when {
				!query.isNullOrEmpty() -> {
					// Encode only the key: encoding the whole "filter[name]="
					// would also escape the '=' and break the parameter.
					append("/tim-kiem?")
					append("filter[name]".urlEncoded())
					append('=')
					append(query.urlEncoded())
					hasQueryString = true
				}
				!tags.isNullOrEmpty() -> {
					append("/the-loai/")
					for (tag in tags) {
						append(tag.key)
					}
				}
				else -> {
					append("/danh-sach")
				}
			}
			append(if (hasQueryString) '&' else '?')
			append("page=")
			append(page.toString())
			append("&sort=")
			when (sortOrder) {
				SortOrder.POPULARITY -> append("-views")
				SortOrder.UPDATED -> append("-updated_at")
				SortOrder.NEWEST -> append("-created_at")
				SortOrder.ALPHABETICAL -> append("name")
				else -> append("-updated_at") // default to "last updated"
			}
		}
		val doc = webClient.httpGet(url).parseHtml()
		return doc.select("div.grid div.manga-vertical")
			.map { div ->
				val href = div.selectFirstOrThrow("a").attr("href")
				// The cover is set via an inline CSS background-image, not an <img> tag.
				val img = div.selectFirstOrThrow(".cover").attr("style").substringAfter("url('").substringBefore("')")
				Manga(
					id = generateUid(href),
					title = div.selectFirstOrThrow("a.text-ellipsis").text(),
					altTitle = null,
					url = href,
					publicUrl = href.toAbsoluteUrl(domain),
					rating = RATING_UNKNOWN,
					isNsfw = true,
					coverUrl = img,
					tags = setOf(),
					state = null,
					author = null,
					source = source,
				)
			}
	}

	/**
	 * Fills in alternative title, state, tags, author, description and the
	 * chapter list from the manga's detail page.
	 */
	override suspend fun getDetails(manga: Manga): Manga {
		val root = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
		val dateFormat = SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US)
		return manga.copy(
			// NOTE(review): the selector text is French on a Vietnamese site —
			// likely copied from another parser; confirm it matches real markup.
			altTitle = root.select(".divider2:contains(Noms associés :)").firstOrNull()?.text(),
			// "Tình trạng" = status. A missing status row now yields a null state
			// instead of the NPE the previous `.first()!!` produced.
			state = when (root.select("div.grow div.mt-2:contains(Tình trạng) a").firstOrNull()?.text()) {
				"Đang tiến hành" -> MangaState.ONGOING
				"Đã hoàn thành" -> MangaState.FINISHED
				else -> null
			},
			// "Thể loại" = genres; the tag key is the last path segment of the link.
			tags = root.select("div.grow div.mt-2:contains(Thể loại) span a").mapNotNullToSet { a ->
				MangaTag(
					key = a.attr("href").removeSuffix("/").substringAfterLast('/'),
					title = a.text().toTitleCase(),
					source = source,
				)
			},
			// "Tác giả" = author(s); joined with ", " after trimming stray commas.
			author = root.select("div.grow div.mt-2:contains(Tác giả) span a")
				.joinToString { it.text().trim(',', ' ') },
			description = root.selectFirst("div.py-4.border-t")?.html(),
			chapters = root.select("ul.overflow-y-auto.overflow-x-hidden a")
				.mapChapters(reversed = true) { i, a ->
					val href = a.attr("href")
					val name = a.selectFirstOrThrow("span.text-ellipsis").text()
					// Upload time comes from the "timeago" element's datetime attribute.
					val date = a.selectFirstOrThrow("span.timeago").attr("datetime")
					MangaChapter(
						id = generateUid(href),
						name = name,
						number = i,
						url = href,
						scanlator = null,
						uploadDate = dateFormat.tryParse(date),
						branch = null,
						source = source,
					)
				},
		)
	}

	/** Collects page images; prefers the lazy-load "data-src" over "src". */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		return doc.select("div.text-center img.lazy").map { img ->
			val url = img.attrAsRelativeUrlOrNull("data-src") ?: img.attrAsRelativeUrlOrNull("src")
				?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}

	/** Scrapes the genre list from the home page; keys are the links' last path segments. */
	override suspend fun getTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain/").parseHtml()
		val body = doc.body()
		return body.select("ul.absolute.w-full a").mapToSet { a ->
			MangaTag(
				key = a.attr("href").removeSuffix("/").substringAfterLast('/'),
				title = a.selectFirstOrThrow("span.text-ellipsis").text(),
				source = source,
			)
		}
	}
}
Loading…
Cancel
Save