Small fixes

master
Koitharu 1 year ago
parent dd7568659f
commit f26fecb714
Signed by: Koitharu
GPG Key ID: 676DEE768C17A9D7

@@ -201,7 +201,7 @@ internal class ComickFunParser(context: MangaLoaderContext) :
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
chapters = getChapters(comic.getString("hid")),
)
}

@@ -184,7 +184,7 @@ internal class ExHentaiParser(
rawTitle.contains("(ongoing)", ignoreCase = true) -> MangaState.ONGOING
else -> null
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
source = source,
)
}

@@ -558,7 +558,7 @@ internal class HitomiLaParser(context: MangaLoaderContext) : LegacyMangaParser(c
"https://${getDomain("${subDomain}a")}/webp/$commonId$imageId/$hash.webp"
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
publicUrl = json.getString("galleryurl").toAbsoluteUrl(domain),
tags =
buildSet

@@ -171,7 +171,7 @@ internal class ImHentai(context: MangaLoaderContext) :
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
chapters = listOf(
MangaChapter(
id = manga.id,

@@ -134,7 +134,7 @@ internal abstract class LineWebtoonsParser(
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
largeCoverUrl = jo.getStringOrNull("thumbnailVertical")?.toAbsoluteUrl(staticDomain),
tags = setOf(parseTag(jo.getJSONObject("genreInfo"))),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = jo.getString("synopsis"),
// I don't think the API provides this info
state = null,
@@ -165,7 +165,7 @@ internal abstract class LineWebtoonsParser(
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
largeCoverUrl = null,
tags = emptySet(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = null,
state = null,
source = source,
@@ -211,7 +211,7 @@ internal abstract class LineWebtoonsParser(
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
largeCoverUrl = jo.getStringOrNull("thumbnailVertical")?.toAbsoluteUrl(staticDomain),
tags = setOfNotNull(genres[jo.getString("representGenre")]),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = jo.getString("synopsis"),
// I don't think the API provides this info
state = null,

@@ -213,7 +213,7 @@ internal abstract class MangaFireParser(
else -> null
}
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = document.selectFirstOrThrow("#synopsis div.modal-content").html(),
chapters = getChapters(manga.url, document),
)

@@ -196,7 +196,7 @@ internal class MangaPark(context: MangaLoaderContext) :
val author = doc.selectFirst("div[q:key=tz_4]")?.textOrNull()
manga.copy(
altTitles = setOfNotNull(doc.selectFirst("div[q:key=tz_2]")?.textOrNull()),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = doc.selectFirst("react-island[q:key=0a_9]")?.html(),
state = when (doc.selectFirst("span[q:key=Yn_5]")?.text()?.lowercase()) {
"ongoing" -> MangaState.ONGOING

@@ -203,7 +203,7 @@ internal class MangaReaderToParser(context: MangaLoaderContext) :
else -> null
}
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = document.select("div.description").html(),
chapters = parseChapters(document),
source = source,

@@ -140,7 +140,7 @@ internal abstract class NineMangaParser(
title = root.selectFirst("h1[itemprop=name]")?.textOrNull()?.removeSuffix("Manga")?.trimEnd()
?: manga.title,
tags = tags.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = parseStatus(infoRoot.select("li a.red").text()),
description = infoRoot.getElementsByAttributeValue("itemprop", "description").first()?.html()
?.substringAfter("</b>"),

@@ -270,7 +270,7 @@ internal class NineNineNineHentaiParser(context: MangaLoaderContext) :
altTitles = setOf(name),
coverUrl = cover.first,
largeCoverUrl = cover.second,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
contentRating = ContentRating.ADULT,
tags = tags?.mapToSet {
MangaTag(

@@ -137,7 +137,7 @@ internal abstract class WebtoonsParser(
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
largeCoverUrl = jo.getStringOrNull("thumbnailVertical")?.toAbsoluteUrl(staticDomain),
tags = setOf(parseTag(jo.getJSONObject("genreInfo"))),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = jo.getString("synopsis"),
// I don't think the API provides this info,
state = null,
@@ -170,7 +170,7 @@ internal abstract class WebtoonsParser(
title = jo.getString("title"),
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
altTitles = emptySet(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
rating = jo.getFloatOrDefault("starScoreAverage", -10f) / 10f,
tags = setOfNotNull(allGenreCache.get()[jo.getString("representGenre")]),
@@ -213,7 +213,7 @@ internal abstract class WebtoonsParser(
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
largeCoverUrl = null,
tags = emptySet(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = null,
state = null,
source = source,

@@ -134,7 +134,7 @@ internal abstract class CupFoxParser(
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = doc.selectFirst(selectMangaDescription)?.html(),
chapters = doc.select(selectMangaChapters)
.mapChapters { i, li ->

@@ -114,7 +114,7 @@ internal class BeeToon(context: MangaLoaderContext) :
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
chapters = doc.select(".items-chapters a").mapChapters(reversed = true) { i, a ->
val url = a.attrAsRelativeUrl("href").toAbsoluteUrl(domain)
MangaChapter(

@@ -119,7 +119,7 @@ internal class ComicExtra(context: MangaLoaderContext) :
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = doc.selectFirstOrThrow("div.detail-desc-content p").html(),
chapters = doc.select("ul.basic-list li").let { elements ->
elements.mapChapters { i, li ->

@@ -154,7 +154,7 @@ internal class FlameComics(context: MangaLoaderContext) :
"Ongoing" -> MangaState.ONGOING
else -> null
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
largeCoverUrl = if (cover != null) {
imageUrl(seriesId, cover, 640)
} else {

@@ -90,7 +90,7 @@ internal class MangaGeko(context: MangaLoaderContext) :
coverUrl = div.selectFirstOrThrow("img").src(),
tags = emptySet(),
state = null,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
source = source,
)
}
@@ -125,7 +125,7 @@ internal class MangaGeko(context: MangaLoaderContext) :
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = doc.selectFirstOrThrow(".description").html(),
chapters = chaptersDeferred.await(),
)

@@ -105,7 +105,7 @@ internal class MangaKawaiiEn(context: MangaLoaderContext) :
altTitles = doc.select("span[itemprop*=alternativeHeadline]").mapNotNullToSet {
it.textOrNull()
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (doc.selectFirst("span.badge.bg-success.text-uppercase")?.text()) {
"Ongoing" -> MangaState.ONGOING
"" -> MangaState.FINISHED

@@ -121,7 +121,7 @@ internal class MangaTownParser(context: MangaLoaderContext) :
altTitles = emptySet(),
rating = li.selectFirst("p.score")?.selectFirst("b")
?.ownText()?.toFloatOrNull()?.div(5f) ?: RATING_UNKNOWN,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (status) {
"ongoing" -> MangaState.ONGOING
"completed" -> MangaState.FINISHED

@@ -168,7 +168,7 @@ internal class Manhwa18Com(context: MangaLoaderContext) :
cardInfoElement?.selectFirst("b:contains(Other names)")?.parent()?.ownTextOrNull()
?.removePrefix(": "),
),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = docs.selectFirst(".series-summary .summary-content")?.html(),
tags = tags.orEmpty(),
state = state,

@@ -168,7 +168,7 @@ internal class Manhwa18Parser(context: MangaLoaderContext) :
cardInfoElement?.selectFirst("b:contains(Other names)")?.parent()?.ownTextOrNull()
?.removePrefix(": "),
),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = docs.selectFirst(".series-summary .summary-content")?.html(),
tags = tags.orEmpty(),
state = state,

@@ -97,7 +97,7 @@ internal class MyComicList(context: MangaLoaderContext) :
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (doc.selectFirst("td:contains(Status:) + td a")?.text()?.lowercase()) {
"ongoing" -> MangaState.ONGOING
"completed" -> MangaState.FINISHED

@@ -71,7 +71,7 @@ internal class Po2Scans(context: MangaLoaderContext) :
else -> null
},
tags = emptySet(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = doc.selectFirstOrThrow(".summary").html(),
chapters = doc.select(".chap-section .chap")
.mapChapters(reversed = true) { i, div ->

@@ -145,7 +145,7 @@ internal class Pururin(context: MangaLoaderContext) :
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
chapters = listOf(
MangaChapter(
id = manga.id,

@@ -211,7 +211,7 @@ internal class WeebCentral(context: MangaLoaderContext) : LegacyMangaParser(cont
"Hiatus" -> PAUSED
else -> null
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
largeCoverUrl = null,
chapters = null,
source = source,

@@ -187,7 +187,7 @@ internal class TuMangaOnlineParser(context: MangaLoaderContext) : LegacyPagedMan
},
largeCoverUrl = contents.selectFirst(".book-thumbnail")?.attrAsAbsoluteUrlOrNull("src"),
state = parseStatus(contents.select("span.book-status").text().orEmpty()),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
chapters = if (doc.select("div.chapters").isEmpty()) {
doc.select(oneShotChapterListSelector).mapChapters(reversed = true) { _, item ->
oneShotChapterFromElement(item)

@@ -126,7 +126,7 @@ internal abstract class FoolSlideParser(
manga.copy(
coverUrl = doc.selectFirst(".thumbnail img")?.src() ?: manga.coverUrl,
description = desc?.nullIfEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
chapters = chapters,
)
}

@@ -160,7 +160,7 @@ internal class BentomangaParser(context: MangaLoaderContext) :
"En pause" -> MangaState.PAUSED
else -> null
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
chapters = run {
val input = root.selectFirst("input[name=\"limit\"]") ?: return@run parseChapters(root)
val max = input.attr("max").toInt()

@@ -180,7 +180,7 @@ internal class LegacyScansParser(context: MangaLoaderContext) :
)
},
coverUrl = root.selectFirst("div.serieImg img")?.attrAsAbsoluteUrlOrNull("src"),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = root.selectFirst("div.serieDescription div")?.html(),
chapters = root.select("div.chapterList a")
.mapChapters(reversed = true) { i, a ->

@@ -106,7 +106,7 @@ internal class LireScan(context: MangaLoaderContext) : LegacyPagedMangaParser(co
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = root.selectFirst("div.pmovie__text")?.html(),
chapters = root.select("ul li div.chapter")
.mapChapters(reversed = true) { i, div ->

@@ -169,7 +169,7 @@ internal class LugnicaScans(context: MangaLoaderContext) :
"3" -> MangaState.ABANDONED
else -> null
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = jsonManga.getStringOrNull("description"),
chapters = chapters.mapChapters { i, it ->
val id = it.substringAfter("\"chapter\":").substringBefore(",")

@@ -138,7 +138,7 @@ internal class ScantradUnion(context: MangaLoaderContext) :
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = root.selectFirst("p.sContent")?.html(),
chapters = root.select("div.chapter-list li")
.mapChapters(reversed = true) { i, li ->

@@ -192,7 +192,7 @@ internal abstract class FuzzyDoodleParser(
in paused -> MangaState.PAUSED
else -> null
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = doc.select(selectDescription).html(),
tags = doc.select(selectTagManga).mapToSet {
val key = it.attr("href").substringAfterLast('=')

@@ -169,7 +169,7 @@ internal abstract class GalleryAdultsParser(
return manga.copy(
tags = tag.orEmpty(),
title = doc.selectFirst(selectTitle)?.textOrNull()?.cleanupTitle() ?: manga.title,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
chapters = listOf(
MangaChapter(
id = manga.id,

@@ -129,7 +129,7 @@ internal class HentaiEra(context: MangaLoaderContext) :
val author = doc.selectFirst(selectAuthor)?.text()
return manga.copy(
tags = tag.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
chapters = listOf(
MangaChapter(
id = manga.id,

@@ -116,7 +116,7 @@ internal abstract class GattsuParser(
description = doc.selectFirst("div.post-texto")?.html(),
tags = doc.selectFirst(".post-itens li:contains(Tags), .paginaPostInfo li:contains(Categorias)")
?.parseTags().orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
chapters = listOf(
MangaChapter(
id = manga.id,

@@ -75,7 +75,7 @@ internal abstract class GuyaParser(
tags = emptySet(),
description = j.getString("description"),
state = null,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
source = source,
)

@@ -143,7 +143,7 @@ internal class DoujinDesuParser(context: MangaLoaderContext) :
}
val author = metadataEl?.selectFirst("tr:contains(Author)")?.selectLast("td")?.text()
return manga.copy(
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = docs.selectFirst(".wrapper > .metadata > .pb-2")?.selectFirst("p")?.html(),
state = state,
rating = metadataEl?.selectFirst(".rating-prc")?.ownText()?.toFloatOrNull()?.div(10f) ?: RATING_UNKNOWN,

@@ -101,7 +101,7 @@ internal class HentaiCrot(context: MangaLoaderContext) :
altTitles = setOfNotNull(
doc.selectFirst("div.entry-content ul li:contains(Alternative Name(s) :) em")?.textOrNull(),
),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = null,
chapters = listOf(
MangaChapter(

@@ -101,7 +101,7 @@ internal class PixHentai(context: MangaLoaderContext) :
altTitles = setOfNotNull(
doc.selectFirst("div.entry-content ul li:contains(Alternative Name(s) :) em")?.textOrNull(),
),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = null,
chapters = listOf(
MangaChapter(

@@ -113,7 +113,7 @@ internal abstract class IkenParser(
description = it.getString("postContent"),
rating = RATING_UNKNOWN,
tags = emptySet(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (it.getString("seriesStatus")) {
"ONGOING" -> MangaState.ONGOING
"COMPLETED" -> MangaState.FINISHED

@@ -90,7 +90,7 @@ internal class NicovideoSeigaParser(context: MangaLoaderContext) :
title = item.selectFirst(".mg_body > .title > a")?.text() ?: return@mapNotNull null,
coverUrl = item.selectFirst(".comic_icon > div > a > img")?.attrAsAbsoluteUrl("src"),
altTitles = emptySet(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
rating = RATING_UNKNOWN,
url = href,
contentRating = null,

@@ -159,7 +159,7 @@ internal abstract class LikeMangaParser(
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = doc.requireElementById("summary_shortened").html(),
chapters = run {
if (maxPageChapter == 1) {

@@ -161,7 +161,7 @@ internal abstract class LilianaParser(
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (doc.selectFirst("div.y6x11p i.fas.fa-rss + span.dt")?.text()?.lowercase().orEmpty()) {
in ongoing -> MangaState.ONGOING
in finished -> MangaState.FINISHED

@@ -477,7 +477,7 @@ internal abstract class MadaraParser(
source = source,
)
}.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (
summary?.selectFirst(".mg_status")
?.selectFirst(".summary-content")

@@ -34,7 +34,7 @@ internal class FireScans(context: MangaLoaderContext) :
source = source,
)
}.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (
summary?.selectFirst(".mg_status")
?.selectFirst(".summary-content")

@@ -119,7 +119,7 @@ internal class Hentai4Free(context: MangaLoaderContext) :
source = source,
)
}.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")?.ownText()
?.lowercase().orEmpty()) {
in ongoing -> MangaState.ONGOING

@@ -93,7 +93,7 @@ internal class IsekaiScan(context: MangaLoaderContext) :
source = source,
)
}.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")?.ownText()
?.lowercase()) {
"ongoing" -> MangaState.ONGOING

@@ -116,7 +116,7 @@ internal class IsekaiScanEuParser(context: MangaLoaderContext) :
source = source,
)
}.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")?.ownText()
?.lowercase().orEmpty()) {
in ongoing -> MangaState.ONGOING

@@ -91,7 +91,7 @@ internal class MangaDass(context: MangaLoaderContext) :
source = source,
)
}.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")?.ownText()
?.lowercase().orEmpty()) {
in ongoing -> MangaState.ONGOING

@@ -84,7 +84,7 @@ internal class MangaDna(context: MangaLoaderContext) :
source = source,
)
}.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")?.ownText()
?.lowercase().orEmpty()) {
in ongoing -> MangaState.ONGOING

@@ -13,145 +13,145 @@ import java.util.*
@Broken
@MangaSourceParser("MANGAPURE", "MangaPure", "en")
internal class MangaPure(context: MangaLoaderContext) :
MadaraParser(context, MangaParserSource.MANGAPURE, "mangapure.net") {
override val tagPrefix = "mangas/"
override val listUrl = "latest-manga/"
override val datePattern = "MMMM d, HH:mm"
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.POPULARITY,
SortOrder.UPDATED,
)
init {
paginator.firstPage = 1
searchPaginator.firstPage = 1
}
override suspend fun getFilterOptions(): MangaListFilterOptions {
return super.getFilterOptions().copy(
availableStates = emptySet(),
availableContentRating = emptySet(),
)
}
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val url = buildString {
append("https://")
append(domain)
when {
!filter.query.isNullOrEmpty() -> {
append("/search?s=")
append(filter.query.urlEncoded())
append("&page=")
append(page.toString())
append("&post_type=wp-manga")
}
else -> {
val tag = filter.tags.oneOrThrowIfMany()
if (filter.tags.isNotEmpty()) {
append("/$tagPrefix")
append(tag?.key.orEmpty())
append("?orderby=")
when (order) {
SortOrder.POPULARITY -> append("2")
SortOrder.UPDATED -> append("3")
else -> append("3")
}
append("&page=")
append(page.toString())
} else {
when (order) {
SortOrder.POPULARITY -> append("/popular-manga")
SortOrder.UPDATED -> append("/latest-manga")
else -> append("/latest-manga")
}
append("?page=")
append(page.toString())
}
}
}
}
val doc = webClient.httpGet(url).parseHtml()
return doc.select("div.row.c-tabs-item__content").ifEmpty {
doc.select("div.page-item-detail.manga")
}.map { div ->
val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
val summary = div.selectFirst(".tab-summary") ?: div.selectFirst(".item-summary")
val author = summary?.selectFirst(".mg_author")?.selectFirst("a")?.ownText()
Manga(
id = generateUid(href),
url = href,
publicUrl = href.toAbsoluteUrl(div.host ?: domain),
coverUrl = div.selectFirst("img")?.src(),
title = (summary?.selectFirst("h3") ?: summary?.selectFirst("h4"))?.text().orEmpty(),
altTitles = emptySet(),
rating = div.selectFirst("span.total_votes")?.ownText()?.toFloatOrNull()?.div(5f) ?: -1f,
tags = summary?.selectFirst(".mg_genres")?.select("a")?.mapNotNullToSet { a ->
MangaTag(
key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
title = a.text().ifEmpty { return@mapNotNullToSet null }.toTitleCase(),
source = source,
)
}.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
state = when (summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")?.ownText()
?.lowercase()) {
"ongoing" -> MangaState.ONGOING
"completed" -> MangaState.FINISHED
else -> null
},
source = source,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
)
}
}
override suspend fun loadChapters(mangaUrl: String, document: Document): List<MangaChapter> {
val mangaId = document.select("div[id^=manga-chapters-holder]").attr("data-id")
val doc = webClient.httpGet("https://$domain/ajax-list-chapter?mangaID=$mangaId").parseHtml()
val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
return doc.select(selectChapter).mapChapters(reversed = true) { i, li ->
val a = li.selectFirst("a")
val href = a?.attrAsRelativeUrlOrNull("href") ?: li.parseFailed("Link is missing")
val link = href + stylePage
MangaChapter(
id = generateUid(href),
url = link,
name = a.ownText(),
number = i + 1f,
volume = 0,
branch = null,
uploadDate = parseChapterDate(
dateFormat,
li.selectFirst(selectDate)?.text(),
),
scanlator = null,
source = source,
)
}
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val fullUrl = chapter.url.toAbsoluteUrl(domain)
val doc = webClient.httpGet(fullUrl).parseHtml()
val urlarray = doc.select("p#arraydata").text().split(",").toTypedArray()
return urlarray.map { url ->
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
}
}
MadaraParser(context, MangaParserSource.MANGAPURE, "mangapure.net") {
override val tagPrefix = "mangas/"
override val listUrl = "latest-manga/"
override val datePattern = "MMMM d, HH:mm"
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.POPULARITY,
SortOrder.UPDATED,
)
init {
paginator.firstPage = 1
searchPaginator.firstPage = 1
}
override suspend fun getFilterOptions(): MangaListFilterOptions {
return super.getFilterOptions().copy(
availableStates = emptySet(),
availableContentRating = emptySet(),
)
}
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val url = buildString {
append("https://")
append(domain)
when {
!filter.query.isNullOrEmpty() -> {
append("/search?s=")
append(filter.query.urlEncoded())
append("&page=")
append(page.toString())
append("&post_type=wp-manga")
}
else -> {
val tag = filter.tags.oneOrThrowIfMany()
if (filter.tags.isNotEmpty()) {
append("/$tagPrefix")
append(tag?.key.orEmpty())
append("?orderby=")
when (order) {
SortOrder.POPULARITY -> append("2")
SortOrder.UPDATED -> append("3")
else -> append("3")
}
append("&page=")
append(page.toString())
} else {
when (order) {
SortOrder.POPULARITY -> append("/popular-manga")
SortOrder.UPDATED -> append("/latest-manga")
else -> append("/latest-manga")
}
append("?page=")
append(page.toString())
}
}
}
}
val doc = webClient.httpGet(url).parseHtml()
return doc.select("div.row.c-tabs-item__content").ifEmpty {
doc.select("div.page-item-detail.manga")
}.map { div ->
val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
val summary = div.selectFirst(".tab-summary") ?: div.selectFirst(".item-summary")
val author = summary?.selectFirst(".mg_author")?.selectFirst("a")?.ownText()
Manga(
id = generateUid(href),
url = href,
publicUrl = href.toAbsoluteUrl(div.host ?: domain),
coverUrl = div.selectFirst("img")?.src(),
title = (summary?.selectFirst("h3") ?: summary?.selectFirst("h4"))?.text().orEmpty(),
altTitles = emptySet(),
rating = div.selectFirst("span.total_votes")?.ownText()?.toFloatOrNull()?.div(5f) ?: -1f,
tags = summary?.selectFirst(".mg_genres")?.select("a")?.mapNotNullToSet { a ->
MangaTag(
key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
title = a.text().ifEmpty { return@mapNotNullToSet null }.toTitleCase(),
source = source,
)
}.orEmpty(),
authors = setOfNotNull(author),
state = when (summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")?.ownText()
?.lowercase()) {
"ongoing" -> MangaState.ONGOING
"completed" -> MangaState.FINISHED
else -> null
},
source = source,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
)
}
}
override suspend fun loadChapters(mangaUrl: String, document: Document): List<MangaChapter> {
val mangaId = document.select("div[id^=manga-chapters-holder]").attr("data-id")
val doc = webClient.httpGet("https://$domain/ajax-list-chapter?mangaID=$mangaId").parseHtml()
val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
return doc.select(selectChapter).mapChapters(reversed = true) { i, li ->
val a = li.selectFirst("a")
val href = a?.attrAsRelativeUrlOrNull("href") ?: li.parseFailed("Link is missing")
val link = href + stylePage
MangaChapter(
id = generateUid(href),
url = link,
name = a.ownText(),
number = i + 1f,
volume = 0,
branch = null,
uploadDate = parseChapterDate(
dateFormat,
li.selectFirst(selectDate)?.text(),
),
scanlator = null,
source = source,
)
}
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val fullUrl = chapter.url.toAbsoluteUrl(domain)
val doc = webClient.httpGet(fullUrl).parseHtml()
val urlarray = doc.select("p#arraydata").text().split(",").toTypedArray()
return urlarray.map { url ->
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
}
}
}

@@ -8,96 +8,96 @@ import org.koitharu.kotatsu.parsers.util.*
@MangaSourceParser("MANHWAZ", "ManhwaZ", "en")
internal class Manhwaz(context: MangaLoaderContext) :
MadaraParser(context, MangaParserSource.MANHWAZ, "manhwaz.com", 40) {
MadaraParser(context, MangaParserSource.MANHWAZ, "manhwaz.com", 40) {
override val listUrl = "genre/manhwa"
override val tagPrefix = "genre/"
override val withoutAjax = true
override val selectTestAsync = "div.list-chapter"
override val listUrl = "genre/manhwa"
override val tagPrefix = "genre/"
override val withoutAjax = true
override val selectTestAsync = "div.list-chapter"
init {
paginator.firstPage = 1
searchPaginator.firstPage = 1
}
init {
paginator.firstPage = 1
searchPaginator.firstPage = 1
}
override suspend fun getFilterOptions() = super.getFilterOptions().copy(
availableStates = emptySet(),
availableContentRating = emptySet(),
)
override suspend fun getFilterOptions() = super.getFilterOptions().copy(
availableStates = emptySet(),
availableContentRating = emptySet(),
)
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val url = buildString {
append("https://")
append(domain)
when {
!filter.query.isNullOrEmpty() -> {
append("/search?s=")
append(filter.query.urlEncoded())
append("&page=")
append(page.toString())
}
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val url = buildString {
append("https://")
append(domain)
when {
!filter.query.isNullOrEmpty() -> {
append("/search?s=")
append(filter.query.urlEncoded())
append("&page=")
append(page.toString())
}
else -> {
else -> {
val tag = filter.tags.oneOrThrowIfMany()
if (filter.tags.isNotEmpty()) {
append("/$tagPrefix")
append(tag?.key.orEmpty())
append("?page=")
append(page.toString())
append("&")
} else {
append("/$listUrl")
append("?page=")
append(page.toString())
append("&")
}
val tag = filter.tags.oneOrThrowIfMany()
if (filter.tags.isNotEmpty()) {
append("/$tagPrefix")
append(tag?.key.orEmpty())
append("?page=")
append(page.toString())
append("&")
} else {
append("/$listUrl")
append("?page=")
append(page.toString())
append("&")
}
append("m_orderby=")
when (order) {
SortOrder.POPULARITY -> append("views")
SortOrder.UPDATED -> append("latest")
SortOrder.NEWEST -> append("new")
SortOrder.RATING -> append("rating")
else -> append("latest")
}
}
}
}
val doc = webClient.httpGet(url).parseHtml()
append("m_orderby=")
when (order) {
SortOrder.POPULARITY -> append("views")
SortOrder.UPDATED -> append("latest")
SortOrder.NEWEST -> append("new")
SortOrder.RATING -> append("rating")
else -> append("latest")
}
}
}
}
val doc = webClient.httpGet(url).parseHtml()
return doc.select("div.row.c-tabs-item__content").ifEmpty {
doc.select("div.page-item-detail")
}.map { div ->
val href = div.selectFirst("a")?.attrAsRelativeUrlOrNull("href") ?: div.parseFailed("Link not found")
val summary = div.selectFirst(".tab-summary") ?: div.selectFirst(".item-summary")
val author = summary?.selectFirst(".mg_author")?.selectFirst("a")?.ownText()
Manga(
id = generateUid(href),
url = href,
publicUrl = href.toAbsoluteUrl(div.host ?: domain),
coverUrl = div.selectFirst("img")?.src(),
title = (summary?.selectFirst("h3") ?: summary?.selectFirst("h4"))?.text().orEmpty(),
altTitles = emptySet(),
rating = div.selectFirst("span.total_votes")?.ownText()?.toFloatOrNull()?.div(5f) ?: -1f,
tags = summary?.selectFirst(".mg_genres")?.select("a")?.mapNotNullToSet { a ->
MangaTag(
key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
title = a.text().ifEmpty { return@mapNotNullToSet null }.toTitleCase(),
source = source,
)
}.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
state = when (summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")?.ownText()
?.lowercase().orEmpty()) {
in ongoing -> MangaState.ONGOING
in finished -> MangaState.FINISHED
else -> null
},
source = source,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
)
}
}
return doc.select("div.row.c-tabs-item__content").ifEmpty {
doc.select("div.page-item-detail")
}.map { div ->
val href = div.selectFirst("a")?.attrAsRelativeUrlOrNull("href") ?: div.parseFailed("Link not found")
val summary = div.selectFirst(".tab-summary") ?: div.selectFirst(".item-summary")
val author = summary?.selectFirst(".mg_author")?.selectFirst("a")?.ownText()
Manga(
id = generateUid(href),
url = href,
publicUrl = href.toAbsoluteUrl(div.host ?: domain),
coverUrl = div.selectFirst("img")?.src(),
title = (summary?.selectFirst("h3") ?: summary?.selectFirst("h4"))?.text().orEmpty(),
altTitles = emptySet(),
rating = div.selectFirst("span.total_votes")?.ownText()?.toFloatOrNull()?.div(5f) ?: -1f,
tags = summary?.selectFirst(".mg_genres")?.select("a")?.mapNotNullToSet { a ->
MangaTag(
key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
title = a.text().ifEmpty { return@mapNotNullToSet null }.toTitleCase(),
source = source,
)
}.orEmpty(),
authors = setOfNotNull(author),
state = when (summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")?.ownText()
?.lowercase().orEmpty()) {
in ongoing -> MangaState.ONGOING
in finished -> MangaState.FINISHED
else -> null
},
source = source,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
)
}
}
}

@ -35,7 +35,7 @@ internal class ShibaManga(context: MangaLoaderContext) :
source = source,
)
}.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (
summary?.selectFirst(".mg_status")
?.selectFirst(".summary-content")

@ -29,7 +29,7 @@ internal class MangasNoSekai(context: MangaLoaderContext) :
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = body.selectFirst("#section-sinopsis p")?.text().orEmpty(),
altTitles = setOfNotNull(
doc.selectFirst("section#section-sinopsis div.d-flex:has(div:contains(Otros nombres)) p")

@ -83,7 +83,7 @@ internal class ManhwaHub(context: MangaLoaderContext) :
source = source,
)
}.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")?.ownText()
?.lowercase().orEmpty()) {
in ongoing -> MangaState.ONGOING

@ -39,7 +39,7 @@ internal class MangaFenxi(context: MangaLoaderContext) :
source = source,
)
}.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (
summary?.selectFirst(".mg_status")
?.selectFirst(".summary-content")

@ -88,7 +88,7 @@ internal class Saytruyenhay(context: MangaLoaderContext) :
source = source,
)
}.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")?.ownText()
?.lowercase().orEmpty()) {
in ongoing -> MangaState.ONGOING

@ -275,7 +275,7 @@ internal abstract class MangaReaderParser(
return manga.copy(
description = docs.selectFirst(detailsDescriptionSelector)?.text(),
state = mangaState,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
contentRating = if (manga.isNsfw || nsfw) {
ContentRating.ADULT
} else {

@ -110,7 +110,7 @@ internal class Normoyun(context: MangaLoaderContext) :
return manga.copy(
description = docs.selectFirst("span.desc")?.html(),
state = mangaState,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
contentRating = if (manga.isNsfw || nsfw) {
ContentRating.ADULT
} else {

@ -133,7 +133,7 @@ internal class RizzComic(context: MangaLoaderContext) :
"hiatus" -> MangaState.PAUSED
else -> null
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
source = source,
description = j.getString("long_description"),
)

@ -13,229 +13,229 @@ import java.util.*
@MangaSourceParser("KOMIKCAST", "KomikCast", "id")
internal class Komikcast(context: MangaLoaderContext) :
MangaReaderParser(context, MangaParserSource.KOMIKCAST, "komikcast.bz", pageSize = 60, searchPageSize = 28) {
override val listUrl = "/daftar-komik"
override val datePattern = "MMM d, yyyy"
override val sourceLocale: Locale = Locale.ENGLISH
override val availableSortOrders: Set<SortOrder> =
EnumSet.of(SortOrder.UPDATED, SortOrder.POPULARITY, SortOrder.ALPHABETICAL)
override val filterCapabilities: MangaListFilterCapabilities
get() = super.filterCapabilities.copy(
isTagsExclusionSupported = false,
)
override suspend fun getFilterOptions() = super.getFilterOptions().copy(
availableStates = EnumSet.of(MangaState.ONGOING, MangaState.FINISHED),
)
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val url = buildString {
append("https://")
append(domain)
when {
!filter.query.isNullOrEmpty() -> {
append("/page/")
append(page.toString())
append("/?s=")
append(filter.query.urlEncoded())
}
else -> {
append(listUrl)
append("/page/")
append(page.toString())
append("/?type=")
append(
when (order) {
SortOrder.ALPHABETICAL -> "&orderby=titleasc"
SortOrder.ALPHABETICAL_DESC -> "&orderby=titledesc"
SortOrder.POPULARITY -> "&orderby=popular"
SortOrder.UPDATED -> "" // To get the Updated list, you don't need "orderby" in the url.
else -> ""
},
)
val tagKey = "genre[]".urlEncoded()
val tagQuery =
if (filter.tags.isEmpty()) ""
else filter.tags.joinToString(separator = "&", prefix = "&") { "$tagKey=${it.key}" }
append(tagQuery)
if (filter.states.isNotEmpty()) {
filter.states.oneOrThrowIfMany()?.let {
append("&status=")
when (it) {
MangaState.ONGOING -> append("Ongoing")
MangaState.FINISHED -> append("Completed")
else -> append("")
}
}
}
}
}
}
return parseMangaList(webClient.httpGet(url).parseHtml())
}
override suspend fun getDetails(manga: Manga): Manga {
val docs = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
val chapters = docs.select("#chapter-wrapper > li").mapChapters(reversed = true) { index, element ->
val url = element.selectFirst("a.chapter-link-item")?.attrAsRelativeUrl("href") ?: return@mapChapters null
MangaChapter(
id = generateUid(url),
name = element.selectFirst("a.chapter-link-item")?.ownText().orEmpty(),
url = url,
number = index + 1f,
volume = 0,
scanlator = null,
uploadDate = parseChapterDate(
dateFormat,
element.selectFirst("div.chapter-link-time")?.text(),
),
branch = null,
source = source,
)
}
return parseInfo(docs, manga, chapters)
}
override suspend fun parseInfo(docs: Document, manga: Manga, chapters: List<MangaChapter>): Manga {
val tagMap = getOrCreateTagMap()
val tags = docs.select(".komik_info-content-genre > a").mapNotNullToSet { tagMap[it.text()] }
val state = docs.selectFirst(".komik_info-content-meta span:contains(Status)")?.html()
val mangaState = if (state!!.contains("Ongoing")) {
MangaState.ONGOING
} else {
MangaState.FINISHED
}
val author = docs.selectFirst(".komik_info-content-meta span:contains(Author)")
?.lastElementChild()?.textOrNull()
val nsfw =
docs.selectFirst(".restrictcontainer") != null || docs.selectFirst(".info-right .alr") != null || docs.selectFirst(
".postbody .alr",
) != null
return manga.copy(
description = docs.selectFirst("div.komik_info-description-sinopsis")?.text(),
state = mangaState,
authors = author?.let { setOf(it) } ?: emptySet(),
contentRating = if (manga.isNsfw || nsfw) {
ContentRating.ADULT
} else {
ContentRating.SAFE
},
tags = tags,
chapters = chapters,
)
}
override fun parseMangaList(docs: Document): List<Manga> {
return docs.select("div.list-update_item").mapNotNull {
val a = it.selectFirstOrThrow("a.data-tooltip")
val relativeUrl = a.attrAsRelativeUrl("href")
val rating = it.selectFirst(".numscore")?.text()?.toFloatOrNull()?.div(10) ?: RATING_UNKNOWN
val name = it.selectFirst("h3.title")?.text().orEmpty()
Manga(
id = generateUid(relativeUrl),
url = relativeUrl,
title = name,
altTitles = emptySet(),
publicUrl = a.attrAsAbsoluteUrl("href"),
rating = rating,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
coverUrl = it.selectFirst("img.ts-post-image")?.src(),
tags = emptySet(),
state = null,
authors = emptySet(),
source = source,
)
}
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val chapterUrl = chapter.url.toAbsoluteUrl(domain)
val docs = webClient.httpGet(chapterUrl).parseHtml()
val test = docs.select("script:containsData(ts_reader)")
if (test.isNullOrEmpty()) {
return docs.select("div#chapter_body img").map { img ->
val url = img.requireSrc().toRelativeUrl(domain)
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
}
} else {
val script = docs.selectFirstOrThrow("script:containsData(ts_reader)")
val images = JSONObject(script.data().substringAfter('(').substringBeforeLast(')')).getJSONArray("sources")
.getJSONObject(0).getJSONArray("images")
val pages = ArrayList<MangaPage>(images.length())
for (i in 0 until images.length()) {
pages.add(
MangaPage(
id = generateUid(images.getString(i)),
url = images.getString(i),
preview = null,
source = source,
),
)
}
return pages
}
}
private fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
date ?: return 0
return when {
date.endsWith(" ago", ignoreCase = true) -> {
parseRelativeDate(date)
}
else -> dateFormat.tryParse(date)
}
}
private fun parseRelativeDate(date: String): Long {
val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
val cal = Calendar.getInstance()
return when {
WordSet(
"day",
"days",
).anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis
WordSet("hour", "hours").anyWordIn(date) -> cal.apply {
add(
Calendar.HOUR,
-number,
)
}.timeInMillis
WordSet(
"mins",
).anyWordIn(date) -> cal.apply {
add(
Calendar.MINUTE,
-number,
)
}.timeInMillis
WordSet("second").anyWordIn(date) -> cal.apply {
add(
Calendar.SECOND,
-number,
)
}.timeInMillis
WordSet("month", "months").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
WordSet("year").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
else -> 0
}
}
MangaReaderParser(context, MangaParserSource.KOMIKCAST, "komikcast.bz", pageSize = 60, searchPageSize = 28) {
override val listUrl = "/daftar-komik"
override val datePattern = "MMM d, yyyy"
override val sourceLocale: Locale = Locale.ENGLISH
override val availableSortOrders: Set<SortOrder> =
EnumSet.of(SortOrder.UPDATED, SortOrder.POPULARITY, SortOrder.ALPHABETICAL)
override val filterCapabilities: MangaListFilterCapabilities
get() = super.filterCapabilities.copy(
isTagsExclusionSupported = false,
)
override suspend fun getFilterOptions() = super.getFilterOptions().copy(
availableStates = EnumSet.of(MangaState.ONGOING, MangaState.FINISHED),
)
/**
 * Builds the catalogue URL for the requested [page], [order] and [filter],
 * fetches it and parses the result into a list of manga stubs.
 */
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
    val url = buildString {
        append("https://")
        append(domain)
        when {
            // Free-text search goes through the generic WordPress "?s=" endpoint.
            !filter.query.isNullOrEmpty() -> {
                append("/page/")
                append(page.toString())
                append("/?s=")
                append(filter.query.urlEncoded())
            }
            // Otherwise browse the directory listing with sort/tag/status parameters.
            else -> {
                append(listUrl)
                append("/page/")
                append(page.toString())
                append("/?type=")
                append(
                    when (order) {
                        SortOrder.ALPHABETICAL -> "&orderby=titleasc"
                        SortOrder.ALPHABETICAL_DESC -> "&orderby=titledesc"
                        SortOrder.POPULARITY -> "&orderby=popular"
                        SortOrder.UPDATED -> "" // To get the Updated list, you don't need "orderby" in the url.
                        else -> ""
                    },
                )
                // Tags are sent as repeated "genre[]" query parameters.
                val tagKey = "genre[]".urlEncoded()
                val tagQuery =
                    if (filter.tags.isEmpty()) ""
                    else filter.tags.joinToString(separator = "&", prefix = "&") { "$tagKey=${it.key}" }
                append(tagQuery)
                if (filter.states.isNotEmpty()) {
                    // The site supports at most one status filter.
                    filter.states.oneOrThrowIfMany()?.let {
                        append("&status=")
                        when (it) {
                            MangaState.ONGOING -> append("Ongoing")
                            MangaState.FINISHED -> append("Completed")
                            else -> append("")
                        }
                    }
                }
            }
        }
    }
    return parseMangaList(webClient.httpGet(url).parseHtml())
}
/**
 * Loads the manga details page and parses its chapter list;
 * metadata extraction is delegated to [parseInfo].
 */
override suspend fun getDetails(manga: Manga): Manga {
    val docs = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
    val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
    // Chapters are listed newest-first on the site; reversed so index 0 becomes chapter 1.
    val chapters = docs.select("#chapter-wrapper > li").mapChapters(reversed = true) { index, element ->
        val url = element.selectFirst("a.chapter-link-item")?.attrAsRelativeUrl("href") ?: return@mapChapters null
        MangaChapter(
            id = generateUid(url),
            name = element.selectFirst("a.chapter-link-item")?.ownText().orEmpty(),
            url = url,
            // The list markup exposes no explicit chapter numbers; use the position.
            number = index + 1f,
            volume = 0,
            scanlator = null,
            uploadDate = parseChapterDate(
                dateFormat,
                element.selectFirst("div.chapter-link-time")?.text(),
            ),
            branch = null,
            source = source,
        )
    }
    return parseInfo(docs, manga, chapters)
}
/**
 * Extracts metadata (tags, state, author, rating, description) from an
 * already-fetched details page and returns an enriched copy of [manga].
 */
override suspend fun parseInfo(docs: Document, manga: Manga, chapters: List<MangaChapter>): Manga {
    val tagMap = getOrCreateTagMap()
    val tags = docs.select(".komik_info-content-genre > a").mapNotNullToSet { tagMap[it.text()] }
    // BUG FIX: the previous code dereferenced the Status row with `!!` and
    // crashed on pages that omit it. Treat a missing row as "unknown state".
    val state = docs.selectFirst(".komik_info-content-meta span:contains(Status)")?.html()
    val mangaState = when {
        state == null -> null
        state.contains("Ongoing") -> MangaState.ONGOING
        else -> MangaState.FINISHED
    }
    val author = docs.selectFirst(".komik_info-content-meta span:contains(Author)")
        ?.lastElementChild()?.textOrNull()
    // Any of these page markers flags the title as adult-only.
    val nsfw = docs.selectFirst(".restrictcontainer") != null ||
        docs.selectFirst(".info-right .alr") != null ||
        docs.selectFirst(".postbody .alr") != null
    return manga.copy(
        description = docs.selectFirst("div.komik_info-description-sinopsis")?.text(),
        state = mangaState,
        authors = setOfNotNull(author),
        contentRating = if (manga.isNsfw || nsfw) {
            ContentRating.ADULT
        } else {
            ContentRating.SAFE
        },
        tags = tags,
        chapters = chapters,
    )
}
/**
 * Maps each card of the catalogue grid to a lightweight [Manga] stub;
 * full details are resolved later by [getDetails].
 */
override fun parseMangaList(docs: Document): List<Manga> {
    return docs.select("div.list-update_item").mapNotNull { card ->
        val link = card.selectFirstOrThrow("a.data-tooltip")
        val relativeUrl = link.attrAsRelativeUrl("href")
        // Site scores are on a 0..10 scale; normalise to 0..1.
        val score = card.selectFirst(".numscore")?.text()?.toFloatOrNull()
        Manga(
            id = generateUid(relativeUrl),
            url = relativeUrl,
            title = card.selectFirst("h3.title")?.text().orEmpty(),
            altTitles = emptySet(),
            publicUrl = link.attrAsAbsoluteUrl("href"),
            rating = if (score != null) score / 10 else RATING_UNKNOWN,
            contentRating = if (isNsfwSource) ContentRating.ADULT else null,
            coverUrl = card.selectFirst("img.ts-post-image")?.src(),
            tags = emptySet(),
            state = null,
            authors = emptySet(),
            source = source,
        )
    }
}
/**
 * Resolves the page image list for a chapter. Supports both the legacy
 * layout (plain <img> tags) and the ts_reader layout (JSON blob in a script).
 */
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
    val chapterUrl = chapter.url.toAbsoluteUrl(domain)
    val docs = webClient.httpGet(chapterUrl).parseHtml()
    // Query the reader script once; the old code ran the same selector twice
    // and called isNullOrEmpty() on a non-null Elements receiver.
    val script = docs.selectFirst("script:containsData(ts_reader)")
    return if (script == null) {
        // Legacy layout: pages are plain <img> tags in the chapter body.
        docs.select("div#chapter_body img").map { img ->
            val url = img.requireSrc().toRelativeUrl(domain)
            MangaPage(
                id = generateUid(url),
                url = url,
                preview = null,
                source = source,
            )
        }
    } else {
        // ts_reader layout: page URLs live in a JSON object passed to the script.
        val images = JSONObject(script.data().substringAfter('(').substringBeforeLast(')'))
            .getJSONArray("sources")
            .getJSONObject(0)
            .getJSONArray("images")
        val pages = ArrayList<MangaPage>(images.length())
        for (i in 0 until images.length()) {
            val url = images.getString(i)
            pages.add(
                MangaPage(
                    id = generateUid(url),
                    url = url,
                    preview = null,
                    source = source,
                ),
            )
        }
        pages
    }
}
/**
 * Parses a chapter timestamp; null maps to the epoch sentinel 0.
 * Relative values ("3 days ago") are computed from the current time,
 * everything else goes through the supplied [dateFormat].
 */
private fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
    if (date == null) return 0
    return if (date.endsWith(" ago", ignoreCase = true)) {
        parseRelativeDate(date)
    } else {
        dateFormat.tryParse(date)
    }
}
/**
 * Converts a relative date string ("5 days ago") to epoch millis by
 * subtracting the parsed amount from the current time; returns 0 when
 * no number or no recognised unit is found.
 */
private fun parseRelativeDate(date: String): Long {
    val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
    val cal = Calendar.getInstance()
    return when {
        WordSet("day", "days").anyWordIn(date) ->
            cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis
        WordSet("hour", "hours").anyWordIn(date) ->
            cal.apply { add(Calendar.HOUR, -number) }.timeInMillis
        // BUG FIX: the old sets matched only "mins" and "second", so dates like
        // "1 min ago" or "5 seconds ago" silently parsed to 0. Accept common variants.
        WordSet("min", "mins", "minute", "minutes").anyWordIn(date) ->
            cal.apply { add(Calendar.MINUTE, -number) }.timeInMillis
        WordSet("sec", "secs", "second", "seconds").anyWordIn(date) ->
            cal.apply { add(Calendar.SECOND, -number) }.timeInMillis
        WordSet("month", "months").anyWordIn(date) ->
            cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
        WordSet("year", "years").anyWordIn(date) ->
            cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
        else -> 0
    }
}
}

@ -154,7 +154,7 @@ internal abstract class MangaWorldParser(
altTitles = emptySet(),
rating = RATING_UNKNOWN,
tags = tags,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state =
when (div.selectFirst(".status a")?.text()?.lowercase()) {
"in corso" -> MangaState.ONGOING

@ -68,7 +68,7 @@ internal class Onma(context: MangaLoaderContext) :
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = desc,
altTitles = setOfNotNull(alt),
state = state,

@ -17,294 +17,294 @@ import java.util.*
internal abstract class NepnepParser(
context: MangaLoaderContext,
source: MangaParserSource,
domain: String,
context: MangaLoaderContext,
source: MangaParserSource,
domain: String,
) : LegacyMangaParser(context, source) {
override val configKeyDomain = ConfigKey.Domain(domain)
override val userAgentKey = ConfigKey.UserAgent(UserAgents.CHROME_DESKTOP)
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override val availableSortOrders: Set<SortOrder> =
EnumSet.of(SortOrder.ALPHABETICAL, SortOrder.POPULARITY, SortOrder.UPDATED)
private val searchDoc = suspendLazy(soft = true) {
webClient.httpGet("https://$domain/search/").parseHtml()
}
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isTagsExclusionSupported = true,
isSearchSupported = true,
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
availableStates = EnumSet.allOf(MangaState::class.java),
)
data class MangaWithLastUpdate(
val manga: Manga,
val lastUpdate: Long,
val views: String,
)
override suspend fun getList(offset: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val doc = searchDoc.get()
val json = JSONArray(
doc.selectFirstOrThrow("script:containsData(MainFunction)").data()
.substringAfter("vm.Directory = ")
.substringBefore("vm.GetIntValue")
.trim()
.replace(';', ' '),
)
val mangaWithLastUpdateList = ArrayList<MangaWithLastUpdate>(json.length())
var sort = false
for (i in 0 until json.length()) {
val m = json.getJSONObject(i)
val href = "/manga/" + m.getString("i")
val imgUrl = "https://temp.compsci88.com/cover/" + m.getString("i") + ".jpg"
val lastUpdate = m.getLong("lt")
val views = m.getString("v")
//val viewMonth = m.getString("vm")
when {
!filter.query.isNullOrEmpty() -> {
if (m.getString("s").contains(filter.query, ignoreCase = true) || (m.getJSONArray("al")
.length() > 0 && m.getJSONArray("al").getString(0)
.contains(filter.query, ignoreCase = true))
) {
mangaWithLastUpdateList.add(
MangaWithLastUpdate(addManga(href, imgUrl, m), lastUpdate, views),
)
}
}
else -> {
val tags = filter.tags
val tagsExcluded = filter.tagsExclude
val tagsJson = m.getJSONArray("g").toString()
val tagsMatched =
tags.isEmpty() || tags.all { tag -> tagsJson.contains(tag.key, ignoreCase = true) }
val tagsExcludeMatched = tagsExcluded.isEmpty() || tagsExcluded.none { tag ->
tagsJson.contains(
tag.key,
ignoreCase = true,
)
}
val statesMatched = filter.states.isEmpty() || filter.states.any { state ->
m.getString("ps").contains(
when (state) {
MangaState.ONGOING -> "Ongoing"
MangaState.FINISHED -> "Complete"
MangaState.ABANDONED -> "Cancelled"
MangaState.PAUSED -> "Hiatus"
else -> ""
},
ignoreCase = true,
)
}
if (tagsMatched && tagsExcludeMatched && statesMatched) {
mangaWithLastUpdateList.add(
MangaWithLastUpdate(addManga(href, imgUrl, m), lastUpdate, views),
)
}
sort = true
}
}
}
if (sort) {
when (order) {
SortOrder.POPULARITY -> mangaWithLastUpdateList.sortByDescending { it.views }
SortOrder.UPDATED -> mangaWithLastUpdateList.sortByDescending { it.lastUpdate }
SortOrder.ALPHABETICAL -> {}
else -> throw IllegalArgumentException("Unsupported sort order: $order")
}
}
return mangaWithLastUpdateList.map { it.manga }
.subList(offset, (offset + 30).coerceAtMost(mangaWithLastUpdateList.size))
}
private fun addManga(href: String, imgUrl: String, m: JSONObject): Manga {
return Manga(
id = generateUid(href),
title = m.getString("i").replace('-', ' '),
altTitles = emptySet(),
url = href,
publicUrl = href.toAbsoluteUrl(domain),
rating = RATING_UNKNOWN,
contentRating = null,
coverUrl = imgUrl,
tags = emptySet(),
state = null,
authors = emptySet(),
source = source,
)
}
private suspend fun fetchAvailableTags(): Set<MangaTag> {
val doc = searchDoc.get()
val tags = doc.selectFirstOrThrow("script:containsData(vm.AvailableFilters)").data()
.substringAfter("\"Genre\"")
.substringAfter('[')
.substringBefore(']')
.replace("'", "")
.split(',')
return tags.mapToSet { tag ->
MangaTag(
key = tag,
title = tag,
source = source,
)
}
}
override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val chapter = JSONArray(
JSONArray(
doc.selectFirstOrThrow("script:containsData(MainFunction)").data()
.substringAfter("vm.Chapters = ")
.substringBefore(';'),
).asTypedList<JSONObject>().reversed(),
)
val dateFormat = SimpleDateFormat("yyyy-MM-dd HH:mm:SS", sourceLocale)
val author = doc.select(".list-group-item:contains(Author(s):) a").textOrNull()
return manga.copy(
state = when (doc.selectFirstOrThrow(".list-group-item:contains(Status:) a").text()) {
"Ongoing (Scan)", "Ongoing (Publish)",
-> MangaState.ONGOING
"Complete (Scan)", "Complete (Publish)",
-> MangaState.FINISHED
"Cancelled (Scan)", "Cancelled (Publish)",
"Discontinued (Scan)", "Discontinued (Publish)",
-> MangaState.ABANDONED
"Hiatus (Scan)", "Hiatus (Publish)",
-> MangaState.PAUSED
else -> null
},
tags = doc.select(".list-group-item:contains(Genre(s):) a").mapToSet { a ->
MangaTag(
key = a.attr("href").substringAfterLast('='),
title = a.text().toTitleCase(sourceLocale),
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
description = doc.selectFirstOrThrow(".top-5.Content").textOrNull(),
chapters = chapter.mapJSONIndexed { i, j ->
val indexChapter = j.getString("Chapter")!!
val url = "/read-online/" + manga.url.substringAfter("/manga/") + chapterURLEncode(indexChapter)
val name = j.getStringOrNull("ChapterName").let {
if (it.isNullOrEmpty() || it == "null") "${j.getString("Type")} ${
chapterImage(
indexChapter,
true,
)
}" else it
}
val date = j.getStringOrNull("Date")
MangaChapter(
id = generateUid(url),
name = name,
number = i + 1f,
volume = 0,
url = url,
scanlator = null,
uploadDate = dateFormat.tryParse(date),
branch = null,
source = source,
)
},
)
}
private fun chapterURLEncode(e: String): String {
var index = ""
val t = e.substring(0, 1).toInt()
if (1 != t) {
index = "-index-$t"
}
val ei = e.toInt()
val dgt = when {
ei < 100100 -> 4
ei < 101000 -> 3
ei < 110000 -> 2
else -> 1
}
val n = e.substring(dgt, e.length - 1)
var suffix = ""
val path = e.substring(e.length - 1).toInt()
if (0 != path) {
suffix = ".$path"
}
return "-chapter-$n$suffix$index.html"
}
private val chapterImageRegex = Regex("""^0+""")
private fun chapterImage(e: String, cleanString: Boolean = false): String {
// cleanString will result in an empty string if chapter number is 0, hence the else if below
val a = e.substring(1, e.length - 1).let { if (cleanString) it.replace(chapterImageRegex, "") else it }
// If b is not zero, indicates chapter has decimal numbering
val b = e.substring(e.length - 1).toInt()
return when {
b == 0 && a.isNotEmpty() -> a
b == 0 && a.isEmpty() -> "0"
else -> "$a.$b"
}
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val fullUrl = chapter.url.toAbsoluteUrl(domain)
val doc = webClient.httpGet(fullUrl).parseHtml()
val script = doc.selectFirstOrThrow("script:containsData(MainFunction)").data()
val curChapter = JSONObject(
doc.selectFirstOrThrow("script:containsData(MainFunction)").data()
.substringAfter("vm.CurChapter = ")
.substringBefore(';'),
)
val pageTotal = curChapter.getString("Page")!!.toInt()
val host = "https://" + script
.substringAfter("vm.CurPathName = \"", "")
.substringBefore('"')
check(host.isNotEmpty()) {
"Manga4Life is overloaded and blocking Kotatsu right now. Wait for unblock."
}
val titleURI = script.substringAfter("vm.IndexName = \"").substringBefore("\"")
val seasonURI = curChapter.getString("Directory")!!.let { if (it.isEmpty()) "" else "$it/" }
val path = "$host/manga/$titleURI/$seasonURI"
val chNum = chapterImage(curChapter.getString("Chapter")!!)
return IntRange(1, pageTotal).mapIndexed { i, _ ->
val imageNum = (i + 1).toString().let { "000$it" }.let { it.substring(it.length - 3) }
val url = "$path$chNum-$imageNum.png"
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
}
}
override val configKeyDomain = ConfigKey.Domain(domain)
override val userAgentKey = ConfigKey.UserAgent(UserAgents.CHROME_DESKTOP)
// Registers the user-agent override alongside the base parser's config keys.
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
    super.onCreateConfig(keys)
    keys.add(userAgentKey)
}
override val availableSortOrders: Set<SortOrder> =
EnumSet.of(SortOrder.ALPHABETICAL, SortOrder.POPULARITY, SortOrder.UPDATED)
private val searchDoc = suspendLazy(soft = true) {
webClient.httpGet("https://$domain/search/").parseHtml()
}
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isTagsExclusionSupported = true,
isSearchSupported = true,
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
availableStates = EnumSet.allOf(MangaState::class.java),
)
data class MangaWithLastUpdate(
val manga: Manga,
val lastUpdate: Long,
val views: String,
)
/**
 * Filters and sorts the site's full directory (embedded as a JS assignment on
 * the search page) and returns the slice of 30 entries starting at [offset].
 */
override suspend fun getList(offset: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
    val doc = searchDoc.get()
    // The directory is a JS array literal; cut it out and parse it as JSON.
    val json = JSONArray(
        doc.selectFirstOrThrow("script:containsData(MainFunction)").data()
            .substringAfter("vm.Directory = ")
            .substringBefore("vm.GetIntValue")
            .trim()
            .replace(';', ' '),
    )
    val mangaWithLastUpdateList = ArrayList<MangaWithLastUpdate>(json.length())
    var sort = false
    for (i in 0 until json.length()) {
        val m = json.getJSONObject(i)
        val href = "/manga/" + m.getString("i")
        val imgUrl = "https://temp.compsci88.com/cover/" + m.getString("i") + ".jpg"
        val lastUpdate = m.getLong("lt")
        val views = m.getString("v")
        //val viewMonth = m.getString("vm")
        when {
            // Text search matches the slug ("s") or the first alternative title.
            !filter.query.isNullOrEmpty() -> {
                if (m.getString("s").contains(filter.query, ignoreCase = true) || (m.getJSONArray("al")
                        .length() > 0 && m.getJSONArray("al").getString(0)
                        .contains(filter.query, ignoreCase = true))
                ) {
                    mangaWithLastUpdateList.add(
                        MangaWithLastUpdate(addManga(href, imgUrl, m), lastUpdate, views),
                    )
                }
            }
            // Browse mode: apply tag include/exclude and state filters.
            else -> {
                val tags = filter.tags
                val tagsExcluded = filter.tagsExclude
                val tagsJson = m.getJSONArray("g").toString()
                val tagsMatched =
                    tags.isEmpty() || tags.all { tag -> tagsJson.contains(tag.key, ignoreCase = true) }
                val tagsExcludeMatched = tagsExcluded.isEmpty() || tagsExcluded.none { tag ->
                    tagsJson.contains(tag.key, ignoreCase = true)
                }
                val statesMatched = filter.states.isEmpty() || filter.states.any { state ->
                    m.getString("ps").contains(
                        when (state) {
                            MangaState.ONGOING -> "Ongoing"
                            MangaState.FINISHED -> "Complete"
                            MangaState.ABANDONED -> "Cancelled"
                            MangaState.PAUSED -> "Hiatus"
                            else -> ""
                        },
                        ignoreCase = true,
                    )
                }
                if (tagsMatched && tagsExcludeMatched && statesMatched) {
                    mangaWithLastUpdateList.add(
                        MangaWithLastUpdate(addManga(href, imgUrl, m), lastUpdate, views),
                    )
                }
                sort = true
            }
        }
    }
    if (sort) {
        when (order) {
            // BUG FIX: `views` is a raw string; sorting it lexicographically ranked
            // "9" above "10". Compare numerically (non-digit separators are ignored).
            SortOrder.POPULARITY -> mangaWithLastUpdateList.sortByDescending {
                it.views.filter(Char::isDigit).toLongOrNull() ?: 0L
            }
            SortOrder.UPDATED -> mangaWithLastUpdateList.sortByDescending { it.lastUpdate }
            SortOrder.ALPHABETICAL -> {}
            else -> throw IllegalArgumentException("Unsupported sort order: $order")
        }
    }
    // BUG FIX: an offset past the end used to throw IndexOutOfBoundsException
    // from subList; return an empty page instead.
    val end = (offset + 30).coerceAtMost(mangaWithLastUpdateList.size)
    if (offset >= end) return emptyList()
    return mangaWithLastUpdateList.map { it.manga }.subList(offset, end)
}
/**
 * Builds a list-page [Manga] stub from a directory entry; the slug ("i")
 * doubles as a readable title once its dashes are replaced with spaces.
 */
private fun addManga(href: String, imgUrl: String, m: JSONObject): Manga {
    val slug = m.getString("i")
    return Manga(
        id = generateUid(href),
        title = slug.replace('-', ' '),
        altTitles = emptySet(),
        url = href,
        publicUrl = href.toAbsoluteUrl(domain),
        rating = RATING_UNKNOWN,
        contentRating = null,
        coverUrl = imgUrl,
        tags = emptySet(),
        state = null,
        authors = emptySet(),
        source = source,
    )
}
/**
 * Extracts the genre list from the search page's "vm.AvailableFilters" script
 * and maps each genre name to a [MangaTag] (key equals title on this source).
 */
private suspend fun fetchAvailableTags(): Set<MangaTag> {
    val doc = searchDoc.get()
    // Genres appear as a JS array of single-quoted names after the "Genre" key;
    // cut the bracketed section out and strip the quotes before splitting.
    val tags = doc.selectFirstOrThrow("script:containsData(vm.AvailableFilters)").data()
        .substringAfter("\"Genre\"")
        .substringAfter('[')
        .substringBefore(']')
        .replace("'", "")
        .split(',')
    return tags.mapToSet { tag ->
        MangaTag(
            key = tag,
            title = tag,
            source = source,
        )
    }
}
/**
 * Loads the details page and enriches [manga] with state, tags, author,
 * description and the chapter list parsed from the embedded "vm.Chapters" JSON.
 */
override suspend fun getDetails(manga: Manga): Manga {
    val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
    // Chapters come newest-first in the script; reverse so index 0 is chapter 1.
    val chapter = JSONArray(
        JSONArray(
            doc.selectFirstOrThrow("script:containsData(MainFunction)").data()
                .substringAfter("vm.Chapters = ")
                .substringBefore(';'),
        ).asTypedList<JSONObject>().reversed(),
    )
    // NOTE(review): pattern uses 'SS' (milliseconds) where 'ss' (seconds) was
    // probably intended — confirm against the site's actual "Date" format.
    val dateFormat = SimpleDateFormat("yyyy-MM-dd HH:mm:SS", sourceLocale)
    val author = doc.select(".list-group-item:contains(Author(s):) a").textOrNull()
    return manga.copy(
        state = when (doc.selectFirstOrThrow(".list-group-item:contains(Status:) a").text()) {
            "Ongoing (Scan)", "Ongoing (Publish)",
            -> MangaState.ONGOING
            "Complete (Scan)", "Complete (Publish)",
            -> MangaState.FINISHED
            "Cancelled (Scan)", "Cancelled (Publish)",
            "Discontinued (Scan)", "Discontinued (Publish)",
            -> MangaState.ABANDONED
            "Hiatus (Scan)", "Hiatus (Publish)",
            -> MangaState.PAUSED
            else -> null
        },
        tags = doc.select(".list-group-item:contains(Genre(s):) a").mapToSet { a ->
            MangaTag(
                key = a.attr("href").substringAfterLast('='),
                title = a.text().toTitleCase(sourceLocale),
                source = source,
            )
        },
        authors = setOfNotNull(author),
        description = doc.selectFirstOrThrow(".top-5.Content").textOrNull(),
        chapters = chapter.mapJSONIndexed { i, j ->
            // "Chapter" is the site's encoded chapter id (see chapterURLEncode/chapterImage).
            val indexChapter = j.getString("Chapter")!!
            val url = "/read-online/" + manga.url.substringAfter("/manga/") + chapterURLEncode(indexChapter)
            // Fall back to "<Type> <number>" when the chapter has no explicit name.
            val name = j.getStringOrNull("ChapterName").let {
                if (it.isNullOrEmpty() || it == "null") "${j.getString("Type")} ${
                    chapterImage(
                        indexChapter,
                        true,
                    )
                }" else it
            }
            val date = j.getStringOrNull("Date")
            MangaChapter(
                id = generateUid(url),
                name = name,
                number = i + 1f,
                volume = 0,
                url = url,
                scanlator = null,
                uploadDate = dateFormat.tryParse(date),
                branch = null,
                source = source,
            )
        },
    )
}
/**
 * Converts the site's encoded chapter id (e.g. "100105") into the URL suffix
 * used by reader pages (e.g. "-chapter-10.5.html").
 */
private fun chapterURLEncode(e: String): String {
    // First digit encodes the "index" (season/part); 1 is the default and is omitted.
    val season = e.take(1).toInt()
    val indexSuffix = if (season != 1) "-index-$season" else ""
    // How many leading digits to drop depends on the magnitude of the value.
    val encoded = e.toInt()
    val skip = when {
        encoded < 100100 -> 4
        encoded < 101000 -> 3
        encoded < 110000 -> 2
        else -> 1
    }
    val chapterNumber = e.substring(skip, e.length - 1)
    // The last digit, when non-zero, is a decimal sub-chapter (e.g. 10.5).
    val decimal = e.takeLast(1).toInt()
    val decimalSuffix = if (decimal != 0) ".$decimal" else ""
    return "-chapter-$chapterNumber$decimalSuffix$indexSuffix.html"
}
}
// Strips leading zeros when normalising chapter numbers.
private val chapterImageRegex = Regex("""^0+""")

/**
 * Decodes the site's chapter id into a display/URL number. The middle digits
 * hold the (optionally zero-padded) integer chapter number and the final
 * digit holds the decimal part (0 means none).
 */
private fun chapterImage(e: String, cleanString: Boolean = false): String {
    val raw = e.substring(1, e.length - 1)
    val integerPart = if (cleanString) raw.replace(chapterImageRegex, "") else raw
    val decimalPart = e.substring(e.length - 1).toInt()
    if (decimalPart != 0) return "$integerPart.$decimalPart"
    // cleanString may have stripped the whole value (chapter 0) — fall back to "0".
    return integerPart.ifEmpty { "0" }
}
/**
 * Builds the list of page image URLs for a chapter from the reader page's
 * embedded "vm.CurChapter" / "vm.CurPathName" script variables.
 */
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
    val fullUrl = chapter.url.toAbsoluteUrl(domain)
    val doc = webClient.httpGet(fullUrl).parseHtml()
    val script = doc.selectFirstOrThrow("script:containsData(MainFunction)").data()
    // Current chapter metadata is embedded as a JS object literal.
    val curChapter = JSONObject(
        script
            .substringAfter("vm.CurChapter = ")
            .substringBefore(';'),
    )
    val pageTotal = curChapter.getString("Page")!!.toInt()
    // BUG FIX: the old code checked `("https://" + pathName).isNotEmpty()`,
    // which is always true because of the literal prefix. Validate the
    // extracted host name itself before building image URLs from it.
    val pathName = script
        .substringAfter("vm.CurPathName = \"", "")
        .substringBefore('"')
    check(pathName.isNotEmpty()) {
        "Manga4Life is overloaded and blocking Kotatsu right now. Wait for unblock."
    }
    val host = "https://$pathName"
    val titleURI = script.substringAfter("vm.IndexName = \"").substringBefore("\"")
    val seasonURI = curChapter.getString("Directory")!!.let { if (it.isEmpty()) "" else "$it/" }
    val path = "$host/manga/$titleURI/$seasonURI"
    val chNum = chapterImage(curChapter.getString("Chapter")!!)
    return (1..pageTotal).map { pageNumber ->
        // Page images are zero-padded to at least three digits: 001, 002, ...
        // (padStart also keeps 4-digit numbers intact, which the old
        // "take last 3 chars" logic silently truncated).
        val imageNum = pageNumber.toString().padStart(3, '0')
        val url = "$path$chNum-$imageNum.png"
        MangaPage(
            id = generateUid(url),
            url = url,
            preview = null,
            source = source,
        )
    }
}
}

@ -175,7 +175,7 @@ internal abstract class OtakuSanctuaryParser(
},
description = desc,
altTitles = setOfNotNull(alt),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = state,
chapters = doc.body().requireElementById("chapter").select("tr.chapter")
.mapChapters(reversed = true) { i, tr ->

@ -196,7 +196,7 @@ internal abstract class PizzaReaderParser(
rating = j.getString("rating").toFloatOrNull()?.div(10f)
?: RATING_UNKNOWN,
tags = emptySet(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (j.getString("status").lowercase()) {
in ongoing -> MangaState.ONGOING
in finished -> MangaState.FINISHED

@ -14,162 +14,162 @@ import java.util.*
@MangaSourceParser("BRMANGAS", "BrMangas", "pt")
internal class BrMangas(context: MangaLoaderContext) : LegacyPagedMangaParser(context, MangaParserSource.BRMANGAS, 25) {
override val configKeyDomain = ConfigKey.Domain("www.brmangas.net")
override val userAgentKey = ConfigKey.UserAgent(UserAgents.CHROME_DESKTOP)
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override val availableSortOrders: Set<SortOrder> = EnumSet.of(SortOrder.POPULARITY, SortOrder.UPDATED)
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isSearchSupported = true,
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
)
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val url = buildString {
append("https://")
append(domain)
append('/')
when {
!filter.query.isNullOrEmpty() -> {
if (page > 1) {
append("/page/$page/")
}
append("/?s=")
append(filter.query.urlEncoded())
}
else -> {
if (filter.tags.isNotEmpty()) {
filter.tags.oneOrThrowIfMany()?.let {
append("category/")
append(it.key)
if (page > 1) {
append("/page/$page/")
}
}
} else {
when (order) {
SortOrder.POPULARITY -> append("/")
SortOrder.UPDATED -> append("manga/")
else -> append("manga/")
}
if (page > 1) {
append("page/$page/")
}
}
}
}
}
val doc = webClient.httpGet(url).parseHtml()
val item = when {
!filter.query.isNullOrEmpty() -> {
doc.select("div.listagem div.item")
}
else -> {
if (order == SortOrder.POPULARITY && filter.tags.isEmpty()) {
doc.select("div.listagem")[1].select("div.item") // To remove the 6 mangas updated on the home page
} else {
doc.select("div.listagem div.item")
}
}
}
return item.map { div ->
val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
Manga(
id = generateUid(href),
title = div.selectFirstOrThrow("h2").text(),
altTitles = emptySet(),
url = href,
publicUrl = href.toAbsoluteUrl(domain),
rating = RATING_UNKNOWN,
contentRating = null,
coverUrl = div.selectFirstOrThrow("img").src(),
tags = emptySet(),
state = null,
authors = emptySet(),
source = source,
)
}
}
private suspend fun fetchAvailableTags(): Set<MangaTag> {
val doc = webClient.httpGet("https://$domain/lista-de-generos-de-manga/").parseHtml()
return doc.select(".genres_page a").mapToSet { a ->
MangaTag(
key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
title = a.text().toTitleCase(sourceLocale),
source = source,
)
}
}
override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val author = doc.select("div.serie-infos li:contains(Autor:)").text().replace("Autor:", "").nullIfEmpty()
return manga.copy(
tags = doc.select("div.serie-infos li:contains(Categorias:) a").mapToSet { a ->
MangaTag(
key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
title = a.text().toTitleCase(sourceLocale),
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
description = doc.select(".serie-texto p").html(),
contentRating = if (doc.select("div.serie-infos li:contains(Categorias:)").text().contains("Hentai")) {
ContentRating.ADULT
} else {
manga.contentRating
},
chapters = doc.select(".capitulos li a")
.mapChapters { i, a ->
val url = a.attrAsRelativeUrl("href")
val name = a.text()
MangaChapter(
id = generateUid(url),
name = name,
number = i + 1f,
volume = 0,
url = url,
scanlator = null,
uploadDate = 0,
branch = null,
source = source,
)
},
)
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val fullUrl = chapter.url.toAbsoluteUrl(domain)
val doc = webClient.httpGet(fullUrl).parseHtml()
val scriptData =
doc.selectFirstOrThrow("script:containsData(imageArray)").data().substringAfter('[').substringBefore(']')
.split(",")
return scriptData.map { data ->
val url = data.replace("\\\"", "").replace("\\/", "/")
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
}
}
override val configKeyDomain = ConfigKey.Domain("www.brmangas.net")
override val userAgentKey = ConfigKey.UserAgent(UserAgents.CHROME_DESKTOP)
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override val availableSortOrders: Set<SortOrder> = EnumSet.of(SortOrder.POPULARITY, SortOrder.UPDATED)
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isSearchSupported = true,
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
)
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val url = buildString {
append("https://")
append(domain)
append('/')
when {
!filter.query.isNullOrEmpty() -> {
if (page > 1) {
append("/page/$page/")
}
append("/?s=")
append(filter.query.urlEncoded())
}
else -> {
if (filter.tags.isNotEmpty()) {
filter.tags.oneOrThrowIfMany()?.let {
append("category/")
append(it.key)
if (page > 1) {
append("/page/$page/")
}
}
} else {
when (order) {
SortOrder.POPULARITY -> append("/")
SortOrder.UPDATED -> append("manga/")
else -> append("manga/")
}
if (page > 1) {
append("page/$page/")
}
}
}
}
}
val doc = webClient.httpGet(url).parseHtml()
val item = when {
!filter.query.isNullOrEmpty() -> {
doc.select("div.listagem div.item")
}
else -> {
if (order == SortOrder.POPULARITY && filter.tags.isEmpty()) {
doc.select("div.listagem")[1].select("div.item") // To remove the 6 mangas updated on the home page
} else {
doc.select("div.listagem div.item")
}
}
}
return item.map { div ->
val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
Manga(
id = generateUid(href),
title = div.selectFirstOrThrow("h2").text(),
altTitles = emptySet(),
url = href,
publicUrl = href.toAbsoluteUrl(domain),
rating = RATING_UNKNOWN,
contentRating = null,
coverUrl = div.selectFirstOrThrow("img").src(),
tags = emptySet(),
state = null,
authors = emptySet(),
source = source,
)
}
}
private suspend fun fetchAvailableTags(): Set<MangaTag> {
val doc = webClient.httpGet("https://$domain/lista-de-generos-de-manga/").parseHtml()
return doc.select(".genres_page a").mapToSet { a ->
MangaTag(
key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
title = a.text().toTitleCase(sourceLocale),
source = source,
)
}
}
override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val author = doc.select("div.serie-infos li:contains(Autor:)").text().replace("Autor:", "").nullIfEmpty()
return manga.copy(
tags = doc.select("div.serie-infos li:contains(Categorias:) a").mapToSet { a ->
MangaTag(
key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
title = a.text().toTitleCase(sourceLocale),
source = source,
)
},
authors = setOfNotNull(author),
description = doc.select(".serie-texto p").html(),
contentRating = if (doc.select("div.serie-infos li:contains(Categorias:)").text().contains("Hentai")) {
ContentRating.ADULT
} else {
manga.contentRating
},
chapters = doc.select(".capitulos li a")
.mapChapters { i, a ->
val url = a.attrAsRelativeUrl("href")
val name = a.text()
MangaChapter(
id = generateUid(url),
name = name,
number = i + 1f,
volume = 0,
url = url,
scanlator = null,
uploadDate = 0,
branch = null,
source = source,
)
},
)
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val fullUrl = chapter.url.toAbsoluteUrl(domain)
val doc = webClient.httpGet(fullUrl).parseHtml()
val scriptData =
doc.selectFirstOrThrow("script:containsData(imageArray)").data().substringAfter('[').substringBefore(']')
.split(",")
return scriptData.map { data ->
val url = data.replace("\\\"", "").replace("\\/", "/")
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
}
}
}

@ -20,143 +20,143 @@ import java.util.zip.ZipInputStream
@Broken // Not dead but changed template
@MangaSourceParser("RANDOMSCANS", "LuratoonScan", "pt")
internal class LuratoonScansParser(context: MangaLoaderContext) :
LegacySinglePageMangaParser(context, MangaParserSource.RANDOMSCANS),
Interceptor {
LegacySinglePageMangaParser(context, MangaParserSource.RANDOMSCANS),
Interceptor {
override val configKeyDomain = ConfigKey.Domain("luratoons.com")
override val configKeyDomain = ConfigKey.Domain("luratoons.com")
override fun getRequestHeaders(): Headers = Headers.Builder().add("User-Agent", config[userAgentKey]).build()
override fun getRequestHeaders(): Headers = Headers.Builder().add("User-Agent", config[userAgentKey]).build()
override val availableSortOrders = setOf(SortOrder.ALPHABETICAL)
override val availableSortOrders = setOf(SortOrder.ALPHABETICAL)
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities()
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities()
override suspend fun getFilterOptions() = MangaListFilterOptions()
override suspend fun getFilterOptions() = MangaListFilterOptions()
override suspend fun getList(order: SortOrder, filter: MangaListFilter): List<Manga> {
require(filter.query.isNullOrEmpty()) { ErrorMessages.SEARCH_NOT_SUPPORTED }
val url = urlBuilder()
val tag = filter.tags.oneOrThrowIfMany()
if (tag == null) {
url.addPathSegment("todas-as-obras")
} else {
url.addPathSegment("pesquisar").addQueryParameter("category", tag.key)
}
val doc = webClient.httpGet(url.build()).parseHtml()
return doc.selectFirstOrThrow(".todas__as__obras").select(".comics__all__box").map { div ->
val a = div.selectFirstOrThrow("a")
val href = a.attrAsRelativeUrl("href")
Manga(
id = generateUid(href),
url = href,
publicUrl = href.toAbsoluteUrl(div.host ?: domain),
coverUrl = div.selectFirst("img")?.src().orEmpty(),
title = div.text(),
altTitles = emptySet(),
rating = RATING_UNKNOWN,
tags = emptySet(),
authors = emptySet(),
state = null,
source = source,
contentRating = null,
)
}
}
override suspend fun getList(order: SortOrder, filter: MangaListFilter): List<Manga> {
require(filter.query.isNullOrEmpty()) { ErrorMessages.SEARCH_NOT_SUPPORTED }
val url = urlBuilder()
val tag = filter.tags.oneOrThrowIfMany()
if (tag == null) {
url.addPathSegment("todas-as-obras")
} else {
url.addPathSegment("pesquisar").addQueryParameter("category", tag.key)
}
val doc = webClient.httpGet(url.build()).parseHtml()
return doc.selectFirstOrThrow(".todas__as__obras").select(".comics__all__box").map { div ->
val a = div.selectFirstOrThrow("a")
val href = a.attrAsRelativeUrl("href")
Manga(
id = generateUid(href),
url = href,
publicUrl = href.toAbsoluteUrl(div.host ?: domain),
coverUrl = div.selectFirst("img")?.src().orEmpty(),
title = div.text(),
altTitles = emptySet(),
rating = RATING_UNKNOWN,
tags = emptySet(),
authors = emptySet(),
state = null,
source = source,
contentRating = null,
)
}
}
override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml().body()
val summaryContainer = doc.selectFirstOrThrow(".sumario__container")
// 1 de Maio de 2024 às 20:15
val dateFormat = SimpleDateFormat("dd 'de' MMM 'de' YYYY 'às' HH:mm", sourceLocale)
val author = summaryContainer.getElementsContainingOwnText("Autor(es)").firstOrNull()
?.nextElementSibling()?.textOrNull()
return manga.copy(
title = doc.selectFirst("h1.desc__titulo__comic")?.textOrNull() ?: manga.title,
altTitles = setOfNotNull(
summaryContainer.getElementsContainingOwnText("Alternativo").firstOrNull()
?.nextElementSibling()?.textOrNull(),
),
tags = summaryContainer.getElementsByAttributeValueContaining("href", "?category=").mapToSet {
MangaTag(
title = it.text().toTitleCase(sourceLocale),
key = it.attr("href").substringAfterLast('='),
source = source,
)
},
state = when (summaryContainer.getElementsContainingOwnText("Status").firstOrNull()
?.nextElementSibling()?.text()?.lowercase()) {
"em lançamento" -> MangaState.ONGOING
"hiato" -> MangaState.PAUSED
"finalizado" -> MangaState.FINISHED
else -> null
},
authors = author?.let { setOf(it) } ?: emptySet(),
largeCoverUrl = doc.selectFirst("img.sumario__img")?.attrAsAbsoluteUrlOrNull("src"),
description = summaryContainer.selectFirst(".sumario__sinopse__texto")?.html(),
chapters = doc.selectFirstOrThrow("ul.capitulos__lista")
.select("li")
.mapChapters(reversed = true) { _, li ->
val href = li.parent()?.attrAsRelativeUrlOrNull("href") ?: return@mapChapters null
val span = li.selectFirstOrThrow(".numero__capitulo")
MangaChapter(
id = generateUid(href),
name = span.text(),
number = 0.0f,
volume = 0,
url = href,
scanlator = null,
uploadDate = dateFormat.tryParse(span.nextElementSibling()?.text()),
branch = null,
source = source,
)
},
)
}
override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml().body()
val summaryContainer = doc.selectFirstOrThrow(".sumario__container")
// 1 de Maio de 2024 às 20:15
val dateFormat = SimpleDateFormat("dd 'de' MMM 'de' YYYY 'às' HH:mm", sourceLocale)
val author = summaryContainer.getElementsContainingOwnText("Autor(es)").firstOrNull()
?.nextElementSibling()?.textOrNull()
return manga.copy(
title = doc.selectFirst("h1.desc__titulo__comic")?.textOrNull() ?: manga.title,
altTitles = setOfNotNull(
summaryContainer.getElementsContainingOwnText("Alternativo").firstOrNull()
?.nextElementSibling()?.textOrNull(),
),
tags = summaryContainer.getElementsByAttributeValueContaining("href", "?category=").mapToSet {
MangaTag(
title = it.text().toTitleCase(sourceLocale),
key = it.attr("href").substringAfterLast('='),
source = source,
)
},
state = when (summaryContainer.getElementsContainingOwnText("Status").firstOrNull()
?.nextElementSibling()?.text()?.lowercase()) {
"em lançamento" -> MangaState.ONGOING
"hiato" -> MangaState.PAUSED
"finalizado" -> MangaState.FINISHED
else -> null
},
authors = setOfNotNull(author),
largeCoverUrl = doc.selectFirst("img.sumario__img")?.attrAsAbsoluteUrlOrNull("src"),
description = summaryContainer.selectFirst(".sumario__sinopse__texto")?.html(),
chapters = doc.selectFirstOrThrow("ul.capitulos__lista")
.select("li")
.mapChapters(reversed = true) { _, li ->
val href = li.parent()?.attrAsRelativeUrlOrNull("href") ?: return@mapChapters null
val span = li.selectFirstOrThrow(".numero__capitulo")
MangaChapter(
id = generateUid(href),
name = span.text(),
number = 0.0f,
volume = 0,
url = href,
scanlator = null,
uploadDate = dateFormat.tryParse(span.nextElementSibling()?.text()),
branch = null,
source = source,
)
},
)
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()
val regex = Regex("const\\s+urls\\s*=\\s*(\\[.*])")
val urls = doc.select("script").firstNotNullOf {
regex.find(it.data())?.groupValues?.getOrNull(1)
}
val ja = JSONArray(urls)
return (0 until ja.length()).map { i ->
val url = ja.getString(i)
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
}
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()
val regex = Regex("const\\s+urls\\s*=\\s*(\\[.*])")
val urls = doc.select("script").firstNotNullOf {
regex.find(it.data())?.groupValues?.getOrNull(1)
}
val ja = JSONArray(urls)
return (0 until ja.length()).map { i ->
val url = ja.getString(i)
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
}
}
override fun intercept(chain: Interceptor.Chain): Response {
val response = chain.proceed(chain.request())
if (response.mimeType == "application/octet-stream") {
val (bytes, name) = response.use { resp ->
ZipInputStream(resp.requireBody().byteStream()).use {
val entry = it.nextEntry
it.readBytes() to entry?.name
}
}
val type = if (name?.endsWith(".avif", ignoreCase = true) == true) {
"image/avif"
} else {
"image/*"
}.toMediaTypeOrNull()
return response.newBuilder()
.setHeader("Content-Type", type?.toString())
.body(bytes.toResponseBody(type))
.build()
} else {
return response
}
}
override fun intercept(chain: Interceptor.Chain): Response {
val response = chain.proceed(chain.request())
if (response.mimeType == "application/octet-stream") {
val (bytes, name) = response.use { resp ->
ZipInputStream(resp.requireBody().byteStream()).use {
val entry = it.nextEntry
it.readBytes() to entry?.name
}
}
val type = if (name?.endsWith(".avif", ignoreCase = true) == true) {
"image/avif"
} else {
"image/*"
}.toMediaTypeOrNull()
return response.newBuilder()
.setHeader("Content-Type", type?.toString())
.body(bytes.toResponseBody(type))
.build()
} else {
return response
}
}
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
}

@ -108,7 +108,7 @@ internal class YugenMangas(context: MangaLoaderContext) :
description = detailManga.getString("synopsis"),
coverUrl = detailManga.getString("cover"),
altTitles = setOfNotNull(detailManga.getStringOrNull("alternative_names")),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = detailManga.getStringOrNull("status")?.let {
when (it) {
"ongoing" -> MangaState.ONGOING

@ -14,170 +14,170 @@ import java.util.*
@MangaSourceParser("ACOMICS", "AComics", "ru", ContentType.COMICS)
internal class AComics(context: MangaLoaderContext) :
LegacyPagedMangaParser(context, MangaParserSource.ACOMICS, pageSize = 10) {
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.UPDATED,
SortOrder.ALPHABETICAL,
SortOrder.POPULARITY,
)
override val configKeyDomain = ConfigKey.Domain("acomics.ru")
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isSearchSupported = true,
)
init {
paginator.firstPage = 0
searchPaginator.firstPage = 0
context.cookieJar.insertCookies(domain, "ageRestrict=18")
}
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = getOrCreateTagMap().values.toSet(),
availableStates = EnumSet.of(MangaState.ONGOING, MangaState.FINISHED),
)
override suspend fun getListPage(
page: Int,
order: SortOrder,
filter: MangaListFilter,
): List<Manga> {
val url = buildString {
append("https://")
append(domain)
when {
!filter.query.isNullOrEmpty() -> {
if (page > 0) {
return emptyList()
}
append("/search?keyword=")
append(filter.query)
}
else -> {
append("/comics?ratings[]=1&ratings[]=2&ratings[]=3&ratings[]=4&ratings[]=5&ratings[]=6&skip=")
append(page * 10)
append("&sort=")
append(
when (order) {
SortOrder.UPDATED -> "last_update"
SortOrder.ALPHABETICAL -> "serial_name"
SortOrder.POPULARITY -> "subscr_count"
else -> "last_update"
},
)
if (filter.tags.isNotEmpty()) {
append("&categories=")
append(filter.tags.joinToString(separator = ",") { it.key })
}
if (filter.states.isNotEmpty()) {
append("&updatable=")
append(
filter.states.oneOrThrowIfMany().let {
when (it) {
MangaState.ONGOING -> "yes"
MangaState.FINISHED -> "no"
else -> "0"
}
},
)
}
}
}
}
return parseMangaList(webClient.httpGet(url).parseHtml())
}
private fun parseMangaList(docs: Document): List<Manga> {
return docs.select("table.list-loadable").map {
val a = it.selectFirstOrThrow("a")
val url = a.attrAsAbsoluteUrl("href") + "/about"
Manga(
id = generateUid(url),
url = url,
title = it.selectFirstOrThrow(".title").text(),
altTitles = emptySet(),
publicUrl = url,
rating = RATING_UNKNOWN,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
coverUrl = it.selectFirstOrThrow("img").src().orEmpty(),
tags = emptySet(),
state = null,
authors = emptySet(),
source = source,
)
}
}
private var tagCache: ArrayMap<String, MangaTag>? = null
private val mutex = Mutex()
private suspend fun getOrCreateTagMap(): Map<String, MangaTag> = mutex.withLock {
tagCache?.let { return@withLock it }
val tagMap = ArrayMap<String, MangaTag>()
val tagElements =
webClient.httpGet("https://$domain/comics").parseHtml().requireElementById("catalog").select(" a.button")
for (el in tagElements) {
val name = el.html().substringAfterLast("</span>")
if (name.isEmpty()) continue
tagMap[name] = MangaTag(
title = name,
key = el.attr("onclick").substringAfterLast("('").substringBefore("')"),
source = source,
)
}
tagCache = tagMap
return@withLock tagMap
}
override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val tagMap = getOrCreateTagMap()
val tags = doc.select("p.serial-about-badges .category").mapNotNullToSet { tagMap[it.text()] }
val author = doc.selectFirst("p:contains(Автор оригинала:)")?.text()?.replace("Автор оригинала: ", "")
return manga.copy(
tags = tags,
description = doc.selectFirst("section.serial-about-text p")?.text(),
authors = author?.let { setOf(it) } ?: emptySet(),
chapters = listOf(
MangaChapter(
id = manga.id,
name = manga.title,
number = 1f,
volume = 0,
url = manga.url.replace("/about", "/"),
scanlator = null,
uploadDate = 0,
branch = null,
source = source,
),
),
)
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val doc = webClient.httpGet(chapter.url + "1").parseHtml()
val totalPages = doc.selectFirstOrThrow("span.issueNumber").text().substringAfterLast('/').toInt()
return (1..totalPages).map {
val url = chapter.url + it
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
}
}
override suspend fun getPageUrl(page: MangaPage): String {
val doc = webClient.httpGet(page.url.toAbsoluteUrl(domain)).parseHtml()
return doc.requireElementById("mainImage").requireSrc()
}
LegacyPagedMangaParser(context, MangaParserSource.ACOMICS, pageSize = 10) {
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.UPDATED,
SortOrder.ALPHABETICAL,
SortOrder.POPULARITY,
)
override val configKeyDomain = ConfigKey.Domain("acomics.ru")
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isSearchSupported = true,
)
init {
paginator.firstPage = 0
searchPaginator.firstPage = 0
context.cookieJar.insertCookies(domain, "ageRestrict=18")
}
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = getOrCreateTagMap().values.toSet(),
availableStates = EnumSet.of(MangaState.ONGOING, MangaState.FINISHED),
)
override suspend fun getListPage(
page: Int,
order: SortOrder,
filter: MangaListFilter,
): List<Manga> {
val url = buildString {
append("https://")
append(domain)
when {
!filter.query.isNullOrEmpty() -> {
if (page > 0) {
return emptyList()
}
append("/search?keyword=")
append(filter.query)
}
else -> {
append("/comics?ratings[]=1&ratings[]=2&ratings[]=3&ratings[]=4&ratings[]=5&ratings[]=6&skip=")
append(page * 10)
append("&sort=")
append(
when (order) {
SortOrder.UPDATED -> "last_update"
SortOrder.ALPHABETICAL -> "serial_name"
SortOrder.POPULARITY -> "subscr_count"
else -> "last_update"
},
)
if (filter.tags.isNotEmpty()) {
append("&categories=")
append(filter.tags.joinToString(separator = ",") { it.key })
}
if (filter.states.isNotEmpty()) {
append("&updatable=")
append(
filter.states.oneOrThrowIfMany().let {
when (it) {
MangaState.ONGOING -> "yes"
MangaState.FINISHED -> "no"
else -> "0"
}
},
)
}
}
}
}
return parseMangaList(webClient.httpGet(url).parseHtml())
}
private fun parseMangaList(docs: Document): List<Manga> {
return docs.select("table.list-loadable").map {
val a = it.selectFirstOrThrow("a")
val url = a.attrAsAbsoluteUrl("href") + "/about"
Manga(
id = generateUid(url),
url = url,
title = it.selectFirstOrThrow(".title").text(),
altTitles = emptySet(),
publicUrl = url,
rating = RATING_UNKNOWN,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
coverUrl = it.selectFirstOrThrow("img").src().orEmpty(),
tags = emptySet(),
state = null,
authors = emptySet(),
source = source,
)
}
}
private var tagCache: ArrayMap<String, MangaTag>? = null
private val mutex = Mutex()
private suspend fun getOrCreateTagMap(): Map<String, MangaTag> = mutex.withLock {
tagCache?.let { return@withLock it }
val tagMap = ArrayMap<String, MangaTag>()
val tagElements =
webClient.httpGet("https://$domain/comics").parseHtml().requireElementById("catalog").select(" a.button")
for (el in tagElements) {
val name = el.html().substringAfterLast("</span>")
if (name.isEmpty()) continue
tagMap[name] = MangaTag(
title = name,
key = el.attr("onclick").substringAfterLast("('").substringBefore("')"),
source = source,
)
}
tagCache = tagMap
return@withLock tagMap
}
override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val tagMap = getOrCreateTagMap()
val tags = doc.select("p.serial-about-badges .category").mapNotNullToSet { tagMap[it.text()] }
val author = doc.selectFirst("p:contains(Автор оригинала:)")?.text()?.replace("Автор оригинала: ", "")
return manga.copy(
tags = tags,
description = doc.selectFirst("section.serial-about-text p")?.text(),
authors = setOfNotNull(author),
chapters = listOf(
MangaChapter(
id = manga.id,
name = manga.title,
number = 1f,
volume = 0,
url = manga.url.replace("/about", "/"),
scanlator = null,
uploadDate = 0,
branch = null,
source = source,
),
),
)
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val doc = webClient.httpGet(chapter.url + "1").parseHtml()
val totalPages = doc.selectFirstOrThrow("span.issueNumber").text().substringAfterLast('/').toInt()
return (1..totalPages).map {
val url = chapter.url + it
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
}
}
override suspend fun getPageUrl(page: MangaPage): String {
val doc = webClient.httpGet(page.url.toAbsoluteUrl(domain)).parseHtml()
return doc.requireElementById("mainImage").requireSrc()
}
}

@ -99,7 +99,7 @@ internal class NudeMoonParser(
url = href,
title = title.substringAfter(" / "),
altTitles = setOfNotNull(title.substringBefore(" / ", "").takeUnless { it.isBlank() }),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
coverUrl = row.selectFirst("img")?.absUrl("src").orEmpty(),
tags = row.selectFirst(".tag-links")?.select("a")?.mapToSet {
MangaTag(

@ -62,7 +62,7 @@ internal class WaMangaParser(
"закончен" -> MangaState.FINISHED
else -> MangaState.UPCOMING
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
source = source,
contentRating = if (doc.getIntOrDefault("adult", 0) == 0) {
ContentRating.SAFE

@ -60,7 +60,7 @@ internal abstract class ChanParser(
publicUrl = href.toAbsoluteUrl(a.host ?: domain),
altTitles = setOfNotNull(title.second),
title = title.first,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
coverUrl = row.selectFirst("div.manga_images")?.selectFirst("img")
?.absUrl("src").orEmpty(),
tags = runCatching {
@ -176,7 +176,7 @@ internal abstract class ChanParser(
publicUrl = href.toAbsoluteUrl(a.host ?: domain),
altTitles = setOfNotNull(title.second),
title = title.first,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
coverUrl = div.selectFirst("img")?.absUrl("src").orEmpty(),
tags = emptySet(),
rating = RATING_UNKNOWN,

@ -17,346 +17,346 @@ import java.text.SimpleDateFormat
import java.util.*
internal abstract class LibSocialParser(
context: MangaLoaderContext,
source: MangaParserSource,
protected val siteDomain: String,
protected val siteId: Int,
context: MangaLoaderContext,
source: MangaParserSource,
protected val siteDomain: String,
protected val siteId: Int,
) : LegacyPagedMangaParser(context, source, pageSize = 60) {
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.UPDATED,
SortOrder.POPULARITY,
SortOrder.RATING,
SortOrder.NEWEST,
SortOrder.ALPHABETICAL,
SortOrder.ALPHABETICAL_DESC,
)
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.UPDATED,
SortOrder.POPULARITY,
SortOrder.RATING,
SortOrder.NEWEST,
SortOrder.ALPHABETICAL,
SortOrder.ALPHABETICAL_DESC,
)
final override val configKeyDomain = ConfigKey.Domain(siteDomain)
final override val configKeyDomain = ConfigKey.Domain(siteDomain)
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isTagsExclusionSupported = true,
isSearchSupported = true,
isSearchWithFiltersSupported = true,
)
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isTagsExclusionSupported = true,
isSearchSupported = true,
isSearchWithFiltersSupported = true,
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
availableStates = EnumSet.allOf(MangaState::class.java),
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
availableStates = EnumSet.allOf(MangaState::class.java),
)
// Numeric status ids used by the API, mapped to MangaState (reverse lookup is done via keyOf()).
private val statesMap = intObjectMapOf(
1, MangaState.ONGOING,
2, MangaState.FINISHED,
3, MangaState.UPCOMING,
4, MangaState.PAUSED,
5, MangaState.ABANDONED,
)
// Image-server list is fetched once from /api/constants and cached lazily.
private val imageServers = suspendLazy(initializer = ::fetchServers)
// When enabled, each translation team becomes a separate chapter branch (see fetchChapters).
private val splitTranslationsKey = ConfigKey.SplitByTranslations(true)
// User-selectable image server; the display labels are shown in the app's settings UI.
private val preferredServerKey = ConfigKey.PreferredImageServer(
presetValues = mapOf(
null to null,
SERVER_MAIN to "Первый",
SERVER_SECONDARY to "Второй",
SERVER_COMPRESS to "Сжатия",
SERVER_DOWNLOAD to "Загрузки",
SERVER_CROP to "Обрезки",
),
defaultValue = null,
)
// NOTE(review): the four declarations below duplicate the ones above (diff-render artifact).
private val statesMap = intObjectMapOf(
1, MangaState.ONGOING,
2, MangaState.FINISHED,
3, MangaState.UPCOMING,
4, MangaState.PAUSED,
5, MangaState.ABANDONED,
)
private val imageServers = suspendLazy(initializer = ::fetchServers)
private val splitTranslationsKey = ConfigKey.SplitByTranslations(true)
private val preferredServerKey = ConfigKey.PreferredImageServer(
presetValues = mapOf(
null to null,
SERVER_MAIN to "Первый",
SERVER_SECONDARY to "Второй",
SERVER_COMPRESS to "Сжатия",
SERVER_DOWNLOAD to "Загрузки",
SERVER_CROP to "Обрезки",
),
defaultValue = null,
)
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val urlBuilder = HttpUrl.Builder()
.scheme("https")
.host("api.lib.social")
.addPathSegment("api")
.addPathSegment("manga")
.addQueryParameter("site_id[]", siteId.toString())
.addQueryParameter("fields[]", "rate")
.addQueryParameter("fields[]", "rate_avg")
.addQueryParameter("page", page.toString())
for (state in filter.states) {
urlBuilder.addQueryParameter("status[]", statesMap.keyOf(state).toString())
}
for (tag in filter.tags) {
urlBuilder.addQueryParameter("${tag.typeKey()}[]", tag.key.drop(1))
}
for (tag in filter.tagsExclude) {
urlBuilder.addQueryParameter("${tag.typeKey()}_exclude[]", tag.key.drop(1))
}
if (!filter.query.isNullOrEmpty()) {
urlBuilder.addQueryParameter("q", filter.query)
}
urlBuilder.addQueryParameter(
"sort_by",
when (order) {
SortOrder.UPDATED -> "last_chapter_at"
SortOrder.POPULARITY -> "views"
SortOrder.RATING -> "rate_avg"
SortOrder.NEWEST -> "created_at"
SortOrder.ALPHABETICAL,
SortOrder.ALPHABETICAL_DESC,
-> "rus_name"
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val urlBuilder = HttpUrl.Builder()
.scheme("https")
.host("api.lib.social")
.addPathSegment("api")
.addPathSegment("manga")
.addQueryParameter("site_id[]", siteId.toString())
.addQueryParameter("fields[]", "rate")
.addQueryParameter("fields[]", "rate_avg")
.addQueryParameter("page", page.toString())
for (state in filter.states) {
urlBuilder.addQueryParameter("status[]", statesMap.keyOf(state).toString())
}
for (tag in filter.tags) {
urlBuilder.addQueryParameter("${tag.typeKey()}[]", tag.key.drop(1))
}
for (tag in filter.tagsExclude) {
urlBuilder.addQueryParameter("${tag.typeKey()}_exclude[]", tag.key.drop(1))
}
if (!filter.query.isNullOrEmpty()) {
urlBuilder.addQueryParameter("q", filter.query)
}
urlBuilder.addQueryParameter(
"sort_by",
when (order) {
SortOrder.UPDATED -> "last_chapter_at"
SortOrder.POPULARITY -> "views"
SortOrder.RATING -> "rate_avg"
SortOrder.NEWEST -> "created_at"
SortOrder.ALPHABETICAL,
SortOrder.ALPHABETICAL_DESC,
-> "rus_name"
else -> null
},
)
urlBuilder.addQueryParameter(
"sort_type",
when (order) {
SortOrder.UPDATED,
SortOrder.POPULARITY,
SortOrder.RATING,
SortOrder.NEWEST,
SortOrder.ALPHABETICAL_DESC,
-> "desc"
else -> null
},
)
urlBuilder.addQueryParameter(
"sort_type",
when (order) {
SortOrder.UPDATED,
SortOrder.POPULARITY,
SortOrder.RATING,
SortOrder.NEWEST,
SortOrder.ALPHABETICAL_DESC,
-> "desc"
SortOrder.ALPHABETICAL -> "asc"
else -> null
},
)
val json = webClient.httpGet(urlBuilder.build()).parseJson()
val data = json.getJSONArray("data")
return data.mapJSON(::parseManga)
}
SortOrder.ALPHABETICAL -> "asc"
else -> null
},
)
val json = webClient.httpGet(urlBuilder.build()).parseJson()
val data = json.getJSONArray("data")
return data.mapJSON(::parseManga)
}
/**
 * Loads the full details for [manga]: titles, genres/tags, author, description
 * and chapters. The chapter list is fetched concurrently with the details call.
 */
override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
    val chapters = async { fetchChapters(manga) }
    val url = HttpUrl.Builder()
        .scheme("https")
        .host("api.lib.social")
        .addPathSegment("api")
        .addPathSegment("manga")
        .addPathSegment(manga.url)
        .addQueryParameter("fields[]", "summary")
        .addQueryParameter("fields[]", "genres")
        .addQueryParameter("fields[]", "tags")
        .addQueryParameter("fields[]", "authors")
        .build()
    val json = webClient.httpGet(url).parseJson().getJSONObject("data")
    val genres = json.getJSONArray("genres").mapJSON { jo ->
        MangaTag(title = jo.getString("name"), key = "g" + jo.getInt("id"), source = source)
    }
    // Fix: read the "tags" array here, not "genres" a second time — the request above
    // explicitly asks for both fields, and the "t" key prefix must match fetchTags("tags").
    val tags = json.getJSONArray("tags").mapJSON { jo ->
        MangaTag(title = jo.getString("name"), key = "t" + jo.getInt("id"), source = source)
    }
    val author = json.getJSONArray("authors").optJSONObject(0)?.getStringOrNull("name")
    manga.copy(
        title = json.getStringOrNull("rus_name") ?: manga.title,
        altTitles = setOfNotNull(json.getStringOrNull("name")),
        tags = tagsSetOf(tags, genres),
        authors = setOfNotNull(author), // consistent with the rest of this change set
        description = json.getString("summary").nl2br(),
        chapters = chapters.await(),
    )
}
/**
 * Loads the full details for [manga]: titles, genres/tags, author, description
 * and chapters. The chapter list is fetched concurrently with the details call.
 */
override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
    val chapters = async { fetchChapters(manga) }
    val url = HttpUrl.Builder()
        .scheme("https")
        .host("api.lib.social")
        .addPathSegment("api")
        .addPathSegment("manga")
        .addPathSegment(manga.url)
        .addQueryParameter("fields[]", "summary")
        .addQueryParameter("fields[]", "genres")
        .addQueryParameter("fields[]", "tags")
        .addQueryParameter("fields[]", "authors")
        .build()
    val json = webClient.httpGet(url).parseJson().getJSONObject("data")
    val genres = json.getJSONArray("genres").mapJSON { jo ->
        MangaTag(title = jo.getString("name"), key = "g" + jo.getInt("id"), source = source)
    }
    // Fix: read the "tags" array here, not "genres" a second time — the request above
    // explicitly asks for both fields, and the "t" key prefix must match fetchTags("tags").
    val tags = json.getJSONArray("tags").mapJSON { jo ->
        MangaTag(title = jo.getString("name"), key = "t" + jo.getInt("id"), source = source)
    }
    val author = json.getJSONArray("authors").optJSONObject(0)?.getStringOrNull("name")
    manga.copy(
        title = json.getStringOrNull("rus_name") ?: manga.title,
        altTitles = setOfNotNull(json.getStringOrNull("name")),
        tags = tagsSetOf(tags, genres),
        authors = setOfNotNull(author),
        description = json.getString("summary").nl2br(),
        chapters = chapters.await(),
    )
}
/**
 * Loads the page list of a chapter and maps each page onto the chosen image server.
 * The chapter request runs concurrently with resolving the cached server list.
 */
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> = coroutineScope {
val pages = async {
webClient.httpGet(
concatUrl("https://api.lib.social/api/manga/", chapter.url),
).parseJson().getJSONObject("data")
}
val servers = imageServers.get()
val json = pages.await()
val primaryServer = getPrimaryImageServer(servers)
json.getJSONArray("pages").mapJSON { jo ->
val url = jo.getString("url")
MangaPage(
id = generateUid(jo.getLong("id")),
url = concatUrl(primaryServer, url),
// Preview comes from the "compress" server, when that server exists for this site
preview = servers[SERVER_COMPRESS]?.let { concatUrl(it, url) },
source = source,
)
}
}
// NOTE(review): duplicated function (diff-render artifact) — identical to getPages above.
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> = coroutineScope {
val pages = async {
webClient.httpGet(
concatUrl("https://api.lib.social/api/manga/", chapter.url),
).parseJson().getJSONObject("data")
}
val servers = imageServers.get()
val json = pages.await()
val primaryServer = getPrimaryImageServer(servers)
json.getJSONArray("pages").mapJSON { jo ->
val url = jo.getString("url")
MangaPage(
id = generateUid(jo.getLong("id")),
url = concatUrl(primaryServer, url),
preview = servers[SERVER_COMPRESS]?.let { concatUrl(it, url) },
source = source,
)
}
}
/**
 * Fetches the "tags" and "genres" catalogs in parallel and merges them into a
 * single de-duplicated set via [tagsSetOf].
 */
private suspend fun fetchAvailableTags(): Set<MangaTag> = coroutineScope {
    val genresDeferred = async { fetchTags("genres") }
    val tagsDeferred = async { fetchTags("tags") }
    tagsSetOf(tagsDeferred.await(), genresDeferred.await())
}
// NOTE(review): duplicated function (diff-render artifact) — identical to fetchAvailableTags above.
private suspend fun fetchAvailableTags(): Set<MangaTag> = coroutineScope {
val tags = async { fetchTags("tags") }
val genres = async { fetchTags("genres") }
tagsSetOf(tags.await(), genres.await())
}
/**
 * Returns titles the API considers similar to [seed], mapped through [parseManga].
 */
override suspend fun getRelatedManga(seed: Manga): List<Manga> {
val json = webClient.httpGet(
HttpUrl.Builder()
.scheme("https")
.host("api.lib.social")
.addPathSegment("api")
.addPathSegment("manga")
.addPathSegment(seed.url)
.addPathSegment("similar")
.build(),
).parseJson().getJSONArray("data")
return json.mapJSON { jo ->
parseManga(jo.getJSONObject("media"))
}
}
// NOTE(review): duplicated function (diff-render artifact) — identical to getRelatedManga above.
override suspend fun getRelatedManga(seed: Manga): List<Manga> {
val json = webClient.httpGet(
HttpUrl.Builder()
.scheme("https")
.host("api.lib.social")
.addPathSegment("api")
.addPathSegment("manga")
.addPathSegment(seed.url)
.addPathSegment("similar")
.build(),
).parseJson().getJSONArray("data")
return json.mapJSON { jo ->
parseManga(jo.getJSONObject("media"))
}
}
/**
 * Registers source settings: hides the (fixed) domain key and exposes the
 * translation-splitting and preferred-image-server options.
 */
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.remove(configKeyDomain)
keys.add(splitTranslationsKey)
keys.add(preferredServerKey)
}
// NOTE(review): duplicated function (diff-render artifact) — identical to onCreateConfig above.
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.remove(configKeyDomain)
keys.add(splitTranslationsKey)
keys.add(preferredServerKey)
}
/**
 * Maps a catalog JSON object to a list-level [Manga]; tags and chapters are
 * left empty here and filled in by getDetails.
 */
private fun parseManga(jo: JSONObject): Manga {
val cover = jo.getJSONObject("cover")
// ageRestriction id >= 3 marks adult content on this API
val isNsfwSource = jo.getJSONObject("ageRestriction").getIntOrDefault("id", 0) >= 3
return Manga(
id = generateUid(jo.getLong("id")),
title = jo.getString("rus_name").ifEmpty { jo.getString("name") },
altTitles = setOfNotNull(jo.getString("name")),
url = jo.getString("slug_url"),
publicUrl = "https://$siteDomain/ru/manga/" + jo.getString("slug_url"),
// API reports the average on a 0..10 scale; normalize to 0..1
rating = jo.optJSONObject("rating")
?.getFloatOrDefault("average", RATING_UNKNOWN * 10f)?.div(10f) ?: RATING_UNKNOWN,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
coverUrl = cover.getString("thumbnail"),
tags = setOf(),
state = statesMap[jo.optJSONObject("status")?.getIntOrDefault("id", -1) ?: -1],
authors = emptySet(),
largeCoverUrl = cover.getString("default"),
source = source,
)
}
// NOTE(review): duplicated function (diff-render artifact) — identical to parseManga above.
private fun parseManga(jo: JSONObject): Manga {
val cover = jo.getJSONObject("cover")
val isNsfwSource = jo.getJSONObject("ageRestriction").getIntOrDefault("id", 0) >= 3
return Manga(
id = generateUid(jo.getLong("id")),
title = jo.getString("rus_name").ifEmpty { jo.getString("name") },
altTitles = setOfNotNull(jo.getString("name")),
url = jo.getString("slug_url"),
publicUrl = "https://$siteDomain/ru/manga/" + jo.getString("slug_url"),
rating = jo.optJSONObject("rating")
?.getFloatOrDefault("average", RATING_UNKNOWN * 10f)?.div(10f) ?: RATING_UNKNOWN,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
coverUrl = cover.getString("thumbnail"),
tags = setOf(),
state = statesMap[jo.optJSONObject("status")?.getIntOrDefault("id", -1) ?: -1],
authors = emptySet(),
largeCoverUrl = cover.getString("default"),
source = source,
)
}
/**
 * Picks the base URL for page images: the user-preferred server when available,
 * otherwise main -> download -> secondary, in that order.
 * @throws IllegalStateException when none of the fallback servers exist.
 */
private fun getPrimaryImageServer(servers: ScatterMap<String, String>): String {
    val preferredUrl = config[preferredServerKey]?.let { key -> servers[key] }
    if (preferredUrl != null) {
        return preferredUrl
    }
    val fallback = servers[SERVER_MAIN]
        ?: servers[SERVER_DOWNLOAD]
        ?: servers[SERVER_SECONDARY]
    return checkNotNull(fallback) { "No available images servers" }
}
// NOTE(review): duplicated function (diff-render artifact) — identical to getPrimaryImageServer above.
private fun getPrimaryImageServer(servers: ScatterMap<String, String>): String {
val preferred = config[preferredServerKey]
if (preferred != null) {
servers[preferred]?.let { return it }
}
return checkNotNull(servers[SERVER_MAIN] ?: servers[SERVER_DOWNLOAD] ?: servers[SERVER_SECONDARY]) {
"No available images servers"
}
}
/**
 * Loads all chapters of [manga]. Every translation branch of a chapter becomes
 * its own MangaChapter; when the split-by-translations setting is on, the team
 * name is also used as the branch label.
 */
private suspend fun fetchChapters(manga: Manga): List<MangaChapter> {
val url = HttpUrl.Builder()
.scheme("https")
.host("api.lib.social")
.addPathSegment("api")
.addPathSegment("manga")
.addPathSegment(manga.url)
.addPathSegment("chapters")
.build()
val json = webClient.httpGet(url).parseJson().getJSONArray("data")
val builder = ChaptersListBuilder(json.length())
// ISO-8601 timestamps with milliseconds, e.g. 2024-01-01T12:00:00.000Z
val dateFormat = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.US)
val useBranching = config[splitTranslationsKey]
for (i in 0 until json.length()) {
val jo = json.getJSONObject(i)
val volume = jo.getIntOrDefault("volume", 0)
val number = jo.getFloatOrDefault("number", 0f)
val numberString = number.formatSimple()
// Fall back to a synthesized Russian "Volume X Chapter Y" title when the API gives no name
val name = jo.getStringOrNull("name") ?: buildString {
if (volume > 0) append("Том ").append(volume).append(' ')
append("Глава ").append(numberString)
}
val branches = jo.getJSONArray("branches")
for (j in 0 until branches.length()) {
val bjo = branches.getJSONObject(j)
val id = bjo.getLong("id")
val team = bjo.getJSONArray("teams").optJSONObject(0)?.getStringOrNull("name")
builder += MangaChapter(
id = generateUid(id),
name = name,
number = number,
volume = volume,
url = "${manga.url}/chapter?number=$numberString&volume=$volume",
scanlator = team,
uploadDate = dateFormat.tryParse(bjo.getStringOrNull("created_at")),
branch = if (useBranching) team else null,
source = source,
)
}
}
return builder.toList()
}
// NOTE(review): duplicated function (diff-render artifact) — identical to fetchChapters above.
private suspend fun fetchChapters(manga: Manga): List<MangaChapter> {
val url = HttpUrl.Builder()
.scheme("https")
.host("api.lib.social")
.addPathSegment("api")
.addPathSegment("manga")
.addPathSegment(manga.url)
.addPathSegment("chapters")
.build()
val json = webClient.httpGet(url).parseJson().getJSONArray("data")
val builder = ChaptersListBuilder(json.length())
val dateFormat = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.US)
val useBranching = config[splitTranslationsKey]
for (i in 0 until json.length()) {
val jo = json.getJSONObject(i)
val volume = jo.getIntOrDefault("volume", 0)
val number = jo.getFloatOrDefault("number", 0f)
val numberString = number.formatSimple()
val name = jo.getStringOrNull("name") ?: buildString {
if (volume > 0) append("Том ").append(volume).append(' ')
append("Глава ").append(numberString)
}
val branches = jo.getJSONArray("branches")
for (j in 0 until branches.length()) {
val bjo = branches.getJSONObject(j)
val id = bjo.getLong("id")
val team = bjo.getJSONArray("teams").optJSONObject(0)?.getStringOrNull("name")
builder += MangaChapter(
id = generateUid(id),
name = name,
number = number,
volume = volume,
url = "${manga.url}/chapter?number=$numberString&volume=$volume",
scanlator = team,
uploadDate = dateFormat.tryParse(bjo.getStringOrNull("created_at")),
branch = if (useBranching) team else null,
source = source,
)
}
}
return builder.toList()
}
/**
 * Downloads the catalog of the given [type] ("tags" or "genres"), keeping only
 * entries available on this site. Keys are prefixed with the first letter of
 * [type] ('t'/'g') so typeKey() can route them back to the right query parameter.
 */
private suspend fun fetchTags(type: String): List<MangaTag> {
val data = webClient.httpGet(
HttpUrl.Builder()
.scheme("https")
.host("api.lib.social")
.addPathSegment("api").addPathSegment(type).build(),
).parseJson().getJSONArray("data")
val prefix = type.first().toString()
return data.mapJSONNotNull { jo ->
val sites = jo.getJSONArray("site_ids").toIntSet()
if (siteId !in sites) {
return@mapJSONNotNull null
}
MangaTag(
title = jo.getString("name"),
key = prefix + jo.getInt("id"),
source = source,
)
}
}
// NOTE(review): duplicated function (diff-render artifact) — identical to fetchTags above.
private suspend fun fetchTags(type: String): List<MangaTag> {
val data = webClient.httpGet(
HttpUrl.Builder()
.scheme("https")
.host("api.lib.social")
.addPathSegment("api").addPathSegment(type).build(),
).parseJson().getJSONArray("data")
val prefix = type.first().toString()
return data.mapJSONNotNull { jo ->
val sites = jo.getJSONArray("site_ids").toIntSet()
if (siteId !in sites) {
return@mapJSONNotNull null
}
MangaTag(
title = jo.getString("name"),
key = prefix + jo.getInt("id"),
source = source,
)
}
}
/**
 * Loads the image-server table from /api/constants, keeping only servers
 * declared for this site. Keyed by server id (main/secondary/compress/…).
 */
private suspend fun fetchServers(): ScatterMap<String, String> {
val json = webClient.httpGet(
HttpUrl.Builder()
.scheme("https")
.host("api.lib.social")
.addPathSegment("api")
.addPathSegment("constants")
.addQueryParameter("fields[]", "imageServers")
.build(),
).parseJson().getJSONObject("data").getJSONArray("imageServers")
val result = MutableScatterMap<String, String>()
for (i in 0 until json.length()) {
val jo = json.getJSONObject(i)
val sites = jo.getJSONArray("site_ids").toIntSet()
if (siteId !in sites) {
continue
}
result[jo.getString("id")] = jo.getString("url")
}
return result
}
// NOTE(review): duplicated function (diff-render artifact) — identical to fetchServers above.
private suspend fun fetchServers(): ScatterMap<String, String> {
val json = webClient.httpGet(
HttpUrl.Builder()
.scheme("https")
.host("api.lib.social")
.addPathSegment("api")
.addPathSegment("constants")
.addQueryParameter("fields[]", "imageServers")
.build(),
).parseJson().getJSONObject("data").getJSONArray("imageServers")
val result = MutableScatterMap<String, String>()
for (i in 0 until json.length()) {
val jo = json.getJSONObject(i)
val sites = jo.getJSONArray("site_ids").toIntSet()
if (siteId !in sites) {
continue
}
result[jo.getString("id")] = jo.getString("url")
}
return result
}
/**
 * Reverse lookup: returns the int key currently mapped to [value].
 * @throws NoSuchElementException when no entry holds [value].
 */
private fun <V> IntObjectMap<V>.keyOf(value: V): Int {
    var result: Int? = null
    forEach { k, v ->
        if (result == null && v == value) {
            result = k
        }
    }
    return result ?: throw NoSuchElementException("No key associated with value $value")
}
// NOTE(review): duplicated function (diff-render artifact) — identical to keyOf above.
private fun <V> IntObjectMap<V>.keyOf(value: V): Int {
forEach { k, v ->
if (v == value) {
return k
}
}
throw NoSuchElementException("No key associated with value $value")
}
// Copies every element of this JSONArray (read as int) into an IntSet.
private fun JSONArray.toIntSet(): IntSet {
    val ids = MutableIntSet(length())
    repeat(length()) { index ->
        ids.add(getInt(index))
    }
    return ids
}
// NOTE(review): duplicated function (diff-render artifact) — identical to toIntSet above.
private fun JSONArray.toIntSet(): IntSet {
val result = MutableIntSet(length())
for (i in 0 until length()) {
result.add(getInt(i))
}
return result
}
/**
 * Maps a tag back to the API query-parameter family it belongs to, based on the
 * single-letter prefix baked into its key by fetchTags / getDetails.
 */
private fun MangaTag.typeKey(): String = when {
    key.startsWith("g") -> "genres"
    key.startsWith("t") -> "tags"
    else -> throw IllegalArgumentException("Tag $key($title) is of unknown type")
}
// NOTE(review): duplicated function (diff-render artifact) — identical to typeKey above.
private fun MangaTag.typeKey() = when (key.firstOrNull()) {
'g' -> "genres"
't' -> "tags"
else -> throw IllegalArgumentException("Tag $key($title) is of unknown type")
}
/**
 * Merges genres and tags into one set, skipping entries whose titles repeat.
 * Genres win on title collision because they are inserted first.
 */
private fun tagsSetOf(tags: Collection<MangaTag>, genres: Collection<MangaTag>): Set<MangaTag> {
    val capacity = tags.size + genres.size
    val merged = ArraySet<MangaTag>(capacity)
    val seenTitles = HashSet<String>(capacity)
    for (genre in genres) {
        if (seenTitles.add(genre.title)) merged.add(genre)
    }
    for (tag in tags) {
        if (seenTitles.add(tag.title)) merged.add(tag)
    }
    return merged
}
// NOTE(review): duplicated function (diff-render artifact) — identical to tagsSetOf above.
private fun tagsSetOf(tags: Collection<MangaTag>, genres: Collection<MangaTag>): Set<MangaTag> {
val result = ArraySet<MangaTag>(tags.size + genres.size)
val names = HashSet<String>(tags.size + genres.size)
genres.forEach { x -> if (names.add(x.title)) result.add(x) }
tags.forEach { x -> if (names.add(x.title)) result.add(x) }
return result
}
// NOTE(review): this companion object is rendered twice and the braces below do not
// balance — diff-render artifact; the real file declares each constant exactly once.
protected companion object {
protected companion object {
// Server ids as reported by the /api/constants imageServers response
const val SERVER_MAIN = "main"
const val SERVER_SECONDARY = "secondary"
const val SERVER_COMPRESS = "compress"
const val SERVER_DOWNLOAD = "download"
const val SERVER_CROP = "crop"
}
const val SERVER_MAIN = "main"
const val SERVER_SECONDARY = "secondary"
const val SERVER_COMPRESS = "compress"
const val SERVER_DOWNLOAD = "download"
const val SERVER_CROP = "crop"
}
}

@ -148,7 +148,7 @@ internal abstract class ScanParser(
?.ownText()?.toFloatOrNull()?.div(5f)
?: RATING_UNKNOWN,
tags = tags,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
altTitles = setOfNotNull(doc.selectFirst(".card div.col-12.mb-4 h2, .card-series-about .h6")?.textOrNull()),
description = doc.selectFirst(".card div.col-12.mb-4 p, .card-series-desc .mb-4 p")?.html(),
chapters = doc.select(".chapters-list .col-chapter, .card-list-chapter .col-chapter")

@ -27,7 +27,7 @@ internal class MangaFr(context: MangaLoaderContext) :
?.ownText()?.toFloatOrNull()?.div(5f)
?: RATING_UNKNOWN,
tags = emptySet(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
altTitles = setOfNotNull(doc.selectFirst(".card div.col-12.mb-4 h2, .card-series-about .h6")?.textOrNull()),
description = doc.selectFirst(".card div.col-12.mb-4 p, .card-series-desc .mb-4 p")?.html(),
chapters = doc.select(".chapters-list .col-chapter, .card-list-chapter .col-chapter")

@ -28,7 +28,7 @@ internal class ScanIta(context: MangaLoaderContext) :
rating = doc.selectFirst(".card-series-detail .rate-value span")?.ownText()?.toFloatOrNull()?.div(5f)
?: RATING_UNKNOWN,
tags = tags,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
altTitles = setOfNotNull(doc.selectFirst(".card div.col-12.mb-4 h2")?.textOrNull()),
description = doc.selectFirst(".card div.col-12.mb-4 p")?.html(),
chapters = chaptersDeferred.await(),

@ -72,7 +72,7 @@ internal class SadScans(context: MangaLoaderContext) :
else -> null
},
tags = emptySet(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = doc.selectFirstOrThrow(".summary").html(),
chapters = doc.select(".chap-section .chap")
.mapChapters(reversed = true) { i, div ->

@ -26,173 +26,173 @@ private const val PAGE_SIZE = 60
// NOTE High profile focus
@MangaSourceParser("HENTAIUKR", "HentaiUkr", "uk", ContentType.HENTAI)
internal class HentaiUkrParser(context: MangaLoaderContext) : LegacyMangaParser(context, MangaParserSource.HENTAIUKR),
Interceptor {
// Upload-date format used by the "add_date" field in the catalog JSON
private val date = SimpleDateFormat("yyyy-MM-dd", Locale.US)
// Whole catalog, downloaded lazily (soft = may be re-fetched after being collected);
// falls back through several alternative JSON endpoints before giving up.
private val allManga = suspendLazy(soft = true) {
runCatchingCancellable {
webClient.httpGet("https://$domain/search/objects.json").parseJson()
}.recoverCatchingCancellable {
webClient.httpGet("https://$domain/search/objects2.json").parseJson()
}.recoverCatchingCancellable {
webClient.httpGet("https://$domain/search/objects69.json").parseJson()
}.getOrThrow().getJSONArray("manga").asTypedList<JSONObject>()
}
override val configKeyDomain: ConfigKey.Domain = ConfigKey.Domain("hentaiukr.com")
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isSearchSupported = true,
isSearchWithFiltersSupported = true,
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
)
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
// Only one sort order is exposed; getList ignores the `order` argument entirely.
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.NEWEST,
)
/**
 * Fills in the description (scraped from the HTML page) and a single chapter
 * entry for the gallery; upload date comes from the cached catalog JSON.
 */
override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
// The catalog entry (for add_date) and the HTML page (for the "about" text) load in parallel
val jsonDeferred = async { allManga.get().first { it.getString("url") == manga.url } }
val htmlDeferred = async { webClient.httpGet("https://$domain${manga.url}").parseHtml() }
val about = htmlDeferred.await().body().requireElementById("about").text()
manga.copy(
description = about,
chapters = listOf(
MangaChapter(
id = generateUid(manga.id),
name = manga.title,
number = 1f,
volume = 0,
url = manga.url,
scanlator = null,
uploadDate = date.tryParse(jsonDeferred.await().getString("add_date")),
branch = null,
source = source,
),
),
)
}
/**
 * Filters the cached full catalog by text query and tags, then returns one page
 * of [PAGE_SIZE] results starting at [offset]. The [order] argument is ignored —
 * this source exposes only its natural listing order.
 */
override suspend fun getList(offset: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
    // Get all manga
    val json = allManga.get().toMutableList()
    if (!filter.query.isNullOrEmpty()) {
        // Match the query against every searchable text field
        json.retainAll { item ->
            item.getString("name").contains(filter.query, ignoreCase = true) ||
                item.getStringOrNull("eng_name")?.contains(filter.query, ignoreCase = true) == true ||
                item.getStringOrNull("orig_name")?.contains(filter.query, ignoreCase = true) == true ||
                item.getStringOrNull("author")?.contains(filter.query, ignoreCase = true) == true ||
                item.getStringOrNull("team")?.contains(filter.query, ignoreCase = true) == true
        }
    }
    if (filter.tags.isNotEmpty()) {
        // Keep entries carrying at least one of the selected tag ids
        val ids = filter.tags.mapToSet { it.key }
        json.retainAll { item ->
            item.getJSONArray("tags")
                .mapJSON { it.getAsString() }
                .any { x -> x in ids }
        }
    }
    // Return to app
    return json.drop(offset).take(PAGE_SIZE).map { jo ->
        val id = jo.getAsLong()
        val author = jo.getStringOrNull("author")
        Manga(
            id = generateUid(id),
            title = jo.getString("name"),
            altTitles = setOfNotNull(jo.getStringOrNull("eng_name")),
            url = jo.getString("url"),
            publicUrl = jo.getString("url").toAbsoluteUrl(domain),
            rating = RATING_UNKNOWN,
            contentRating = ContentRating.ADULT,
            coverUrl = jo.getString("thumb").toAbsoluteUrl(domain),
            // NOTE(review): optJSONArray may return null — relies on "tags" always being present
            tags = getTags(jo.optJSONArray("tags")),
            state = null,
            // Fix: use setOfNotNull(author) — the idiom this change set applies everywhere else
            authors = setOfNotNull(author),
            largeCoverUrl = null,
            description = null,
            chapters = null,
            source = source,
        )
    }
}
// Scrapes page image URLs from the chapter's vertical-reader HTML page.
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val htmlPages = webClient.httpGet("https://$domain${chapter.url}vertical_reader.html").parseHtml()
return htmlPages.select("img.image").mapIndexed { i, page ->
MangaPage(
// The page id derives from its position in the document
id = generateUid(i.toString()),
"https://$domain${page.attr("src")}",
null,
source,
)
}
}
// Collects the union of all tags found across the whole cached catalog.
private suspend fun fetchAvailableTags(): Set<MangaTag> {
return allManga.get().flatMapTo(HashSet()) { x ->
x.getJSONArray("tags").mapJSON { t ->
MangaTag(
title = t.getString("name"),
key = t.getAsString(),
source = source,
)
}
}
}
// Converts a JSON array of {name, id} tag objects into a set of MangaTag.
private fun getTags(jsonTags: JSONArray): Set<MangaTag> {
    val result = ArraySet<MangaTag>(jsonTags.length())
    for (index in 0 until jsonTags.length()) {
        val tagJson = jsonTags.getJSONObject(index)
        result.add(
            MangaTag(
                title = tagJson.getString("name"),
                key = tagJson.getAsString(),
                source = source,
            ),
        )
    }
    return result
}
// Needed to disable content encoding: requests do not work when the encoding header is sent.
override fun intercept(chain: Interceptor.Chain): Response {
val request = chain.request()
val newRequest = if (request.header(HEADER_ENCODING) != null) {
request.newBuilder().removeHeader(HEADER_ENCODING).build()
} else {
request
}
return chain.proceed(newRequest)
}
/**
 * Reads the "id" field as a Long, accepting numeric or numeric-string values.
 * @throws IllegalStateException when the field is missing, JSON null, or of an unsupported type.
 */
private fun JSONObject.getAsLong(): Long {
    val rawValue = opt("id")
    val converted: Long? = when (rawValue) {
        null, JSONObject.NULL -> null
        is Long -> rawValue
        is Number -> rawValue.toLong()
        is String -> rawValue.toLong()
        else -> null
    }
    return converted ?: error("Cannot read value $rawValue as Long")
}
// Stringified "id" field, used as a stable tag key.
private fun JSONObject.getAsString(): String = get("id").toString()
Interceptor {
private val date = SimpleDateFormat("yyyy-MM-dd", Locale.US)
private val allManga = suspendLazy(soft = true) {
runCatchingCancellable {
webClient.httpGet("https://$domain/search/objects.json").parseJson()
}.recoverCatchingCancellable {
webClient.httpGet("https://$domain/search/objects2.json").parseJson()
}.recoverCatchingCancellable {
webClient.httpGet("https://$domain/search/objects69.json").parseJson()
}.getOrThrow().getJSONArray("manga").asTypedList<JSONObject>()
}
override val configKeyDomain: ConfigKey.Domain = ConfigKey.Domain("hentaiukr.com")
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isSearchSupported = true,
isSearchWithFiltersSupported = true,
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
)
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.NEWEST,
)
override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
val jsonDeferred = async { allManga.get().first { it.getString("url") == manga.url } }
val htmlDeferred = async { webClient.httpGet("https://$domain${manga.url}").parseHtml() }
val about = htmlDeferred.await().body().requireElementById("about").text()
manga.copy(
description = about,
chapters = listOf(
MangaChapter(
id = generateUid(manga.id),
name = manga.title,
number = 1f,
volume = 0,
url = manga.url,
scanlator = null,
uploadDate = date.tryParse(jsonDeferred.await().getString("add_date")),
branch = null,
source = source,
),
),
)
}
override suspend fun getList(offset: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
// Get all manga
val json = allManga.get().toMutableList()
if (!filter.query.isNullOrEmpty()) {
json.retainAll { item ->
item.getString("name").contains(filter.query, ignoreCase = true) ||
item.getStringOrNull("eng_name")?.contains(filter.query, ignoreCase = true) == true ||
item.getStringOrNull("orig_name")?.contains(filter.query, ignoreCase = true) == true ||
item.getStringOrNull("author")?.contains(filter.query, ignoreCase = true) == true ||
item.getStringOrNull("team")?.contains(filter.query, ignoreCase = true) == true
}
}
if (filter.tags.isNotEmpty()) {
val ids = filter.tags.mapToSet { it.key }
json.retainAll { item ->
item.getJSONArray("tags")
.mapJSON { it.getAsString() }
.any { x -> x in ids }
}
}
// Return to app
return json.drop(offset).take(PAGE_SIZE).map { jo ->
val id = jo.getAsLong()
val author = jo.getStringOrNull("author")
Manga(
id = generateUid(id),
title = jo.getString("name"),
altTitles = setOfNotNull(jo.getStringOrNull("eng_name")),
url = jo.getString("url"),
publicUrl = jo.getString("url").toAbsoluteUrl(domain),
rating = RATING_UNKNOWN,
contentRating = ContentRating.ADULT,
coverUrl = jo.getString("thumb").toAbsoluteUrl(domain),
tags = getTags(jo.optJSONArray("tags")),
state = null,
authors = setOfNotNull(author),
largeCoverUrl = null,
description = null,
chapters = null,
source = source,
)
}
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val htmlPages = webClient.httpGet("https://$domain${chapter.url}vertical_reader.html").parseHtml()
return htmlPages.select("img.image").mapIndexed { i, page ->
MangaPage(
id = generateUid(i.toString()),
"https://$domain${page.attr("src")}",
null,
source,
)
}
}
private suspend fun fetchAvailableTags(): Set<MangaTag> {
return allManga.get().flatMapTo(HashSet()) { x ->
x.getJSONArray("tags").mapJSON { t ->
MangaTag(
title = t.getString("name"),
key = t.getAsString(),
source = source,
)
}
}
}
private fun getTags(jsonTags: JSONArray): Set<MangaTag> {
val tagsSet = ArraySet<MangaTag>(jsonTags.length())
repeat(jsonTags.length()) { i ->
val item = jsonTags.getJSONObject(i)
tagsSet.add(
MangaTag(
title = item.getString("name"),
key = item.getAsString(),
source = source,
),
)
}
return tagsSet
}
// Need for disable encoding (with encoding not working)
override fun intercept(chain: Interceptor.Chain): Response {
val request = chain.request()
val newRequest = if (request.header(HEADER_ENCODING) != null) {
request.newBuilder().removeHeader(HEADER_ENCODING).build()
} else {
request
}
return chain.proceed(newRequest)
}
private fun JSONObject.getAsLong(): Long {
val rawValue = opt("id")
return when (rawValue) {
null, JSONObject.NULL -> null
is Long -> rawValue
is Number -> rawValue.toLong()
is String -> rawValue.toLong()
else -> null
} ?: error("Cannot read value $rawValue as Long")
}
private fun JSONObject.getAsString(): String {
return get("id").toString()
}
}

@ -83,7 +83,7 @@ internal class BlogTruyenParser(context: MangaLoaderContext) :
description = mangaInfo.select("div.al-j.fs-12").text(),
url = relativeUrl,
publicUrl = relativeUrl.toAbsoluteUrl(domain),
coverUrl = mangaInfo.selectFirst("div > img.img")?.src().orEmpty(),
coverUrl = mangaInfo.selectFirst("div > img.img")?.src(),
contentRating = null,
rating = RATING_UNKNOWN,
tags = emptySet(),
@ -129,7 +129,7 @@ internal class BlogTruyenParser(context: MangaLoaderContext) :
return manga.copy(
tags = tags,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = doc.selectFirst(".detail .content")?.html(),
chapters = parseChapterList(doc),
largeCoverUrl = doc.selectLast("div.thumbnail > img")?.src(),

@ -116,10 +116,10 @@ internal class BlogTruyenVN(context: MangaLoaderContext) :
id = generateUid(relativeUrl),
title = a.text(),
altTitles = emptySet(),
description = mangaInfo.select("div.al-j.fs-12").text(),
description = mangaInfo.select("div.al-j.fs-12").textOrNull(),
url = relativeUrl,
publicUrl = relativeUrl.toAbsoluteUrl(domain),
coverUrl = mangaInfo.selectFirst("div > img.img")?.src().orEmpty(),
coverUrl = mangaInfo.selectFirst("div > img.img")?.src(),
contentRating = null,
rating = RATING_UNKNOWN,
tags = emptySet(),
@ -181,7 +181,7 @@ internal class BlogTruyenVN(context: MangaLoaderContext) :
return manga.copy(
tags = tags ?: emptySet(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = doc.selectFirst(".detail .content")?.html(),
chapters = parseChapterList(doc),
largeCoverUrl = doc.selectLast("div.thumbnail > img")?.src(),

@ -32,7 +32,7 @@ internal class BuonDuaParser(context: MangaLoaderContext) : LegacyMangaParser(co
val df = SimpleDateFormat("HH:mm dd-MM-yyyy")
val time = content.selectFirst("div.article-info > small")?.text()?.trim()
val chapters = content.selectFirst("nav.pagination")?.select("a.pagination-link")
?.mapIndexed { index, element ->
?.mapChapters { index, element ->
val relUrl = element.attrAsRelativeUrl("href")
MangaChapter(
id = generateUid(relUrl),

@ -22,8 +22,7 @@ private const val PAGE_SIZE = 20
internal class CMangaParser(context: MangaLoaderContext) :
LegacyPagedMangaParser(context, MangaParserSource.CMANGA, PAGE_SIZE), MangaParserAuthProvider {
override val configKeyDomain: ConfigKey.Domain
get() = ConfigKey.Domain("cmangax.com")
override val configKeyDomain: ConfigKey.Domain = ConfigKey.Domain("cmangax.com")
override val availableSortOrders: Set<SortOrder>
get() = EnumSet.of(
@ -47,7 +46,7 @@ internal class CMangaParser(context: MangaLoaderContext) :
override suspend fun getFilterOptions(): MangaListFilterOptions {
return MangaListFilterOptions(
availableTags = tags.get().values.toSet(),
availableTags = tags.get().values.toArraySet(),
availableStates = arraySetOf(MangaState.ONGOING, MangaState.FINISHED, MangaState.PAUSED),
)
}
@ -80,14 +79,14 @@ internal class CMangaParser(context: MangaLoaderContext) :
chapters = webClient
.httpGet("/api/chapter_list?album=$mangaId&page=1&limit=${Int.MAX_VALUE}&v=0v21".toAbsoluteUrl(domain))
.parseJsonArray()
.mapJSON { jo ->
.mapChapters(reversed = true) { _, jo ->
val chapterId = jo.getLong("id_chapter")
val info = jo.parseJson("info")
val chapterNumber = info.getString("num")
val chapterNumber = info.getFloatOrDefault("num", -1f) + 1f
MangaChapter(
id = generateUid(chapterId),
name = if (info.isLocked()) "Chapter $chapterNumber - locked" else "Chapter $chapterNumber",
number = chapterNumber.toFloatOrNull()?.plus(1) ?: 0f,
number = chapterNumber,
volume = 0,
url = "/album/$slug/chapter-$mangaId-$chapterId",
uploadDate = df.tryParse(info.getString("last_update")),
@ -95,7 +94,7 @@ internal class CMangaParser(context: MangaLoaderContext) :
scanlator = null,
source = source,
)
}.reversed(),
},
)
}

@ -24,7 +24,7 @@ import java.util.*
@MangaSourceParser("CUUTRUYEN", "Cứu Truyện", "vi")
internal class CuuTruyenParser(context: MangaLoaderContext) :
LegacyPagedMangaParser(context, MangaParserSource.CUUTRUYEN, 20), Interceptor {
LegacyPagedMangaParser(context, MangaParserSource.CUUTRUYEN, 20) {
override val userAgentKey = ConfigKey.UserAgent(UserAgents.KOTATSU)
@ -115,7 +115,7 @@ internal class CuuTruyenParser(context: MangaLoaderContext) :
altTitles = emptySet(),
coverUrl = jo.getString("cover_mobile_url"),
largeCoverUrl = jo.getString("cover_url"),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
tags = emptySet(),
state = null,
description = null,
@ -153,19 +153,21 @@ internal class CuuTruyenParser(context: MangaLoaderContext) :
// Remove old manga status from "tags"
val newTags = tags.filter { it.key != "da-hoan-thanh" && it.key != "dang-tien-hanh" }.toSet()
val author = json.optJSONObject("author")?.getStringOrNull("name")?.substringBefore(',')?.nullIfEmpty()
val title = json.getStringOrNull("name") ?: manga.title
manga.copy(
title = json.getStringOrNull("name") ?: manga.title,
title = title,
altTitles = json.optJSONArray("titles")?.mapJSONToSet { it.getString("name") }?.minus(title).orEmpty(),
contentRating = if (json.getBooleanOrDefault("is_nsfw", manga.isNsfw)) {
ContentRating.ADULT
} else {
ContentRating.SAFE
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = json.getStringOrNull("full_description"),
tags = newTags,
state = state,
chapters = chapters.await().mapJSON { jo ->
chapters = chapters.await().mapChapters(reversed = true) { _, jo ->
val chapterId = jo.getLong("id")
val number = jo.getFloatOrDefault("number", 0f)
MangaChapter(
@ -179,7 +181,7 @@ internal class CuuTruyenParser(context: MangaLoaderContext) :
branch = null,
source = source,
)
}.reversed(),
},
)
}

@ -105,7 +105,7 @@ internal class DuaLeoTruyen(context: MangaLoaderContext) :
"Full" -> MangaState.FINISHED
else -> null
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = doc.selectFirst(".story-detail-info")?.html(),
chapters = doc.select(".list-chapters .chapter-item").mapChapters(reversed = true) { i, div ->
val a = div.selectFirstOrThrow(".chap_name a")

@ -94,7 +94,7 @@ internal class Hentai18VN(context: MangaLoaderContext) :
private fun parseMangaSearch(doc: Document): List<Manga> {
return doc.select("a.item").map { a ->
val href = a.attr("href")
val mangaInfo = a.selectFirst("img")
val mangaInfo = a.selectFirstOrThrow("img")
Manga(
id = generateUid(href),
url = href,
@ -105,7 +105,7 @@ internal class Hentai18VN(context: MangaLoaderContext) :
tags = emptySet(),
rating = RATING_UNKNOWN,
state = null,
coverUrl = mangaInfo.requireSrc(),
coverUrl = mangaInfo.src(),
contentRating = ContentRating.ADULT,
source = source,
)
@ -138,13 +138,13 @@ internal class Hentai18VN(context: MangaLoaderContext) :
override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val tags = doc.select("div.hentai-info .line-content a.item-tag")
.mapNotNull { a ->
.mapToSet { a ->
MangaTag(
title = a.text(),
key = a.attr("href").substringAfterLast("/"),
title = a.text().toTitleCase(sourceLocale),
key = a.attr("href").substringAfterLast('/'),
source = source,
)
}.toSet()
}
val chapters = doc.select("ul#chapter-list li.citem").mapChapters(reversed = true) { i, li ->
val a = li.selectFirst("a") ?: return@mapChapters null
@ -152,7 +152,7 @@ internal class Hentai18VN(context: MangaLoaderContext) :
id = generateUid(a.attr("href")),
name = a.text(),
number = i + 1f,
url = a.attr("href").removePrefix("https://$domain"),
url = a.attrAsRelativeUrl("href"),
uploadDate = parseChapterDate(li.selectFirst(".time")?.text()),
source = source,
scanlator = null,

@ -115,7 +115,7 @@ internal class HentaiVNParser(context: MangaLoaderContext) : LegacyMangaParser(c
altTitles = infoEl.selectFirst("span.info:contains(Tên Khác:)")?.parent()?.select("span:not(.info) > a")
?.mapNotNullToSet { it.textOrNull() }
.orEmpty(),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = infoEl.select("p:contains(Nội dung:) + p").html(),
tags = tags,
state = stateDoc.select("p:contains(Tình Trạng:) a").firstOrNull()?.text()?.let {

@ -118,7 +118,7 @@ internal class HentaiVnBuzz(context: MangaLoaderContext) :
private fun parseSearchManga(doc: Document): List<Manga> {
return doc.select(".story-item-list.d-flex.align-items-center.position-relative.mb-1").map { div ->
val href = div.selectFirstOrThrow("a.story-item-list__image").attrAsRelativeUrl("href")
val coverUrl = div.selectFirst("img")?.attr("data-src").orEmpty()
val coverUrl = div.selectFirst("img")?.attr("data-src")
val title = div.selectFirst("img")?.attr("alt").orEmpty()
Manga(
id = generateUid(href),
@ -163,11 +163,11 @@ internal class HentaiVnBuzz(context: MangaLoaderContext) :
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val author = doc.select("p:contains(Tác giả:) a").text().nullIfEmpty()
return manga.copy(
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
tags = doc.select("div.mb-1 span a").mapToSet { element ->
MangaTag(
key = element.attr("href").substringAfter("/the-loai/"),
title = element.text().substringBefore(",").trim(), // force trim before , symbol and space
title = element.text().substringBefore(',').trim(), // force trim before , symbol and space
source = source,
)
},

@ -162,7 +162,7 @@ internal class KuroNeko(context: MangaLoaderContext) : LegacyPagedMangaParser(co
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = root.selectFirst("meta[name=description]")?.attrOrNull("content"),
chapters = root.select("div.justify-between ul.overflow-y-auto.overflow-x-hidden a")
.mapChapters(reversed = true) { i, a ->

@ -163,7 +163,7 @@ internal class LxManga(context: MangaLoaderContext) : LegacyPagedMangaParser(con
source = source,
)
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = root.selectFirst("meta[name=description]")?.attrOrNull("content"),
chapters = root.select("div.justify-between ul.overflow-y-auto.overflow-x-hidden a")
.mapChapters(reversed = true) { i, a ->

@ -90,7 +90,7 @@ internal class SayHentai(context: MangaLoaderContext) :
val author = doc.selectFirst("div.summary-heading:contains(Tác giả) + div.summary-content")?.textOrNull()
return manga.copy(
altTitles = setOfNotNull(doc.selectFirst("h2.other-name")?.textOrNull()),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
tags = doc.select("div.genres-content a[rel=tag]").mapToSet { a ->
MangaTag(
key = a.attr("href").substringAfterLast('/'),
@ -178,7 +178,7 @@ internal class SayHentai(context: MangaLoaderContext) :
.mapToSet { a ->
val title = a.ownText().toTitleCase(sourceLocale)
MangaTag(
key = a.attr("href").substringAfterLast("/"),
key = a.attr("href").substringAfterLast('/'),
title = title,
source = source,
)

@ -127,7 +127,7 @@ internal class TruyenGG(context: MangaLoaderContext) : LegacyPagedMangaParser(co
publicUrl = href.toAbsoluteUrl(domain),
rating = RATING_UNKNOWN,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
coverUrl = div.selectFirst(".image-cover img")?.attr("data-src").orEmpty(),
coverUrl = div.selectFirst(".image-cover img")?.attrAsAbsoluteUrlOrNull("data-src"),
tags = emptySet(),
state = null,
authors = emptySet(),
@ -143,11 +143,11 @@ internal class TruyenGG(context: MangaLoaderContext) : LegacyPagedMangaParser(co
return manga.copy(
altTitles = setOfNotNull(doc.selectFirst("h2.other-name")?.textOrNull()),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
tags = doc.select("a.clblue").mapToSet {
MangaTag(
key = it.attr("href").substringAfterLast('-').substringBeforeLast('.'),
title = it.text(),
title = it.text().toTitleCase(sourceLocale),
source = source,
)
},
@ -196,7 +196,7 @@ internal class TruyenGG(context: MangaLoaderContext) : LegacyPagedMangaParser(co
return doc.select(".advsearch-form div.genre-item").mapToSet {
MangaTag(
key = it.selectFirstOrThrow("span").attr("data-id"),
title = it.text(),
title = it.text().toTitleCase(sourceLocale),
source = source,
)
}

@ -94,6 +94,7 @@ internal class TruyenHentaiVN(context: MangaLoaderContext) :
override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val dateFormat = SimpleDateFormat("dd-MM-yyyy", Locale.US)
return manga.copy(
authors = setOfNotNull(doc.selectFirst("div.author i")?.textOrNull()),
tags = doc.select("div.genre.mb-3.mgen a").mapNotNullToSet { a ->
@ -121,13 +122,7 @@ internal class TruyenHentaiVN(context: MangaLoaderContext) :
val name = div.selectFirst("a .name")?.text() ?: ""
val dateStr = div.selectFirst("a span:last-child")?.text()
val uploadDate = dateStr?.let {
try {
SimpleDateFormat("dd-MM-yyyy", Locale.US).parse(it)?.time ?: 0L
} catch (e: Exception) {
0L
}
} ?: 0L
val uploadDate = dateFormat.tryParse(dateStr)
MangaChapter(
id = generateUid(url),
@ -147,15 +142,13 @@ internal class TruyenHentaiVN(context: MangaLoaderContext) :
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()
return doc.select("div.content-text img").mapNotNull { img ->
val url = img.requireSrc().toAbsoluteUrl(domain)
if (url.isNotEmpty()) {
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
} else null
val url = img.src() ?: return@mapNotNull null
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
}
}

@ -166,7 +166,7 @@ internal class TruyenQQ(context: MangaLoaderContext) : LegacyPagedMangaParser(co
"Hoàn Thành" -> MangaState.FINISHED
else -> null
},
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = doc.selectFirst(".story-detail-info")?.html(),
chapters = doc.select("div.list_chapter div.works-chapter-item").mapChapters(reversed = true) { i, div ->
val a = div.selectFirstOrThrow("a")

@ -147,7 +147,7 @@ internal class TruyenTranh3Q(context: MangaLoaderContext) :
return manga.copy(
altTitles = setOfNotNull(doc.selectFirst("h2.other-name")?.textOrNull()),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
tags = tags,
description = doc.selectFirst("div.story-detail-info")?.html(),
state = when (doc.selectFirst(".status p.col-xs-9")?.text()) {

@ -134,7 +134,7 @@ internal class VcomycsParser(context: MangaLoaderContext) :
info.selectFirst(".comic-intro-text > strong:contains(Tên khác:)")?.nextElementSibling()
?.textOrNull(),
),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = when (info.selectFirst(".comic-stt")?.text()) {
"Đang tiến hành" -> MangaState.ONGOING
"Trọn bộ" -> MangaState.FINISHED

@ -101,7 +101,6 @@ internal class YurinekoParser(context: MangaLoaderContext) :
val df = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.US)
return manga.copy(
chapters = response.getJSONArray("chapters")
.asTypedList<JSONObject>()
.mapChapters(true) { i, jo ->
val mangaId = jo.getInt("mangaID")
val chapterId = jo.getInt("id")

@ -168,7 +168,7 @@ internal abstract class WpComicsParser(
largeCoverUrl = null,
tags = mangaTags,
state = mangaState,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
description = tooltipElement?.selectFirst("div.box_text")?.text(),
chapters = null,
source = source,
@ -220,7 +220,7 @@ internal abstract class WpComicsParser(
manga.copy(
description = doc.selectFirst(selectDesc)?.html(),
altTitles = setOfNotNull(doc.selectFirst("h2.other-name")?.textOrNull()),
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = doc.selectFirst(selectState)?.let {
when (it.text()) {
in ongoing -> MangaState.ONGOING

@ -138,7 +138,7 @@ internal class XoxoComics(context: MangaLoaderContext) :
)
},
description = desc,
authors = author?.let { setOf(it) } ?: emptySet(),
authors = setOfNotNull(author),
state = state,
chapters = chaptersDeferred.await(),
)

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save