Merge remote-tracking branch 'upstream/master'

master
Naga 2 years ago
commit 9c0c20f86b

2
.idea/.gitignore vendored

@ -1,3 +1,5 @@
# Default ignored files
/shelf/
/workspace.xml
# GitHub Copilot persisted chat sessions
/copilot/chatSessions

@ -94,6 +94,7 @@ class Manga(
largeCoverUrl: String? = this.largeCoverUrl,
description: String? = this.description,
chapters: List<MangaChapter>? = this.chapters,
source: MangaSource = this.source,
) = Manga(
id = id,
title = title,
@ -109,7 +110,7 @@ class Manga(
largeCoverUrl = largeCoverUrl,
description = description,
chapters = chapters,
source = source
source = source,
)
override fun equals(other: Any?): Boolean {

@ -231,21 +231,19 @@ internal abstract class WebtoonsParser(
val genre = filter.tags.oneOrThrowIfMany()?.key ?: "ALL"
val genres = getAllGenreList()
val result = getAllTitleList()
var result = getAllTitleList()
val sortedResult = when (filter.sortOrder) {
SortOrder.UPDATED -> result.sortedBy { it.date }
if (genre != "ALL") {
result = result.filter { it.manga.tags.contains(genres[genre]) }
}
when (filter.sortOrder) {
SortOrder.UPDATED -> result.sortedByDescending { it.date }
SortOrder.POPULARITY -> result.sortedByDescending { it.readCount }
SortOrder.RATING -> result.sortedByDescending { it.manga.rating }
//SortOrder.LIKE -> result.sortedBy { it.likeitCount }
else -> throw IllegalArgumentException("Unsupported sort order: ${filter.sortOrder}")
}
if (genre != "ALL") {
sortedResult.filter { it.manga.tags.contains(genres[genre]) }
} else {
sortedResult
}
}
else -> getAllTitleList()

@ -1,41 +1,79 @@
package org.koitharu.kotatsu.parsers.site.en
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.delay
import kotlinx.coroutines.withContext
import okhttp3.Headers
import okhttp3.MediaType.Companion.toMediaType
import okhttp3.Request
import okhttp3.RequestBody
import okhttp3.RequestBody.Companion.toRequestBody
import org.json.JSONObject
import org.jsoup.Jsoup
import org.jsoup.nodes.Document
import org.koitharu.kotatsu.parsers.ErrorMessages
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.network.UserAgents
import org.koitharu.kotatsu.parsers.util.*
import org.koitharu.kotatsu.parsers.util.json.getStringOrNull
import java.text.DateFormat
import java.text.SimpleDateFormat
import java.util.*
import kotlin.random.Random
private const val TOO_MANY_REQUESTS = 429
private const val MAX_RETRY_COUNT = 5
@MangaSourceParser("REAPERCOMICS", "ReaperComics", "en")
internal class ReaperComics(context: MangaLoaderContext) :
PagedMangaParser(context, MangaSource.REAPERCOMICS, pageSize = 30) {
PagedMangaParser(context, MangaSource.REAPERCOMICS, pageSize = 32) {
override val availableSortOrders: Set<SortOrder> = EnumSet.of(SortOrder.UPDATED, SortOrder.ALPHABETICAL)
override val configKeyDomain = ConfigKey.Domain("reaperscans.com")
override val isSearchSupported = false
private val userAgentKey = ConfigKey.UserAgent(
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
)
override val headers: Headers = Headers.Builder()
.add("User-Agent", UserAgents.CHROME_DESKTOP)
.build()
private val baseHeaders: Headers
get() = Headers.Builder().add("User-Agent", config[userAgentKey]).build()
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
override val headers
get() = getApiHeaders()
private val selectTotalChapter = "dl.mt-2 div:nth-child(5) > dd"
private val selectState = "dl.mt-2 div:nth-child(4) > dd"
private val searchCache = mutableSetOf<Manga>() // Cache search results
private val chapterCache = mutableMapOf<String, Manga>() // Cache chapter lists
private fun getApiHeaders(): Headers {
val userCookie = context.cookieJar.getCookies(domain).find {
it.name == "user"
} ?: return baseHeaders
val jo = JSONObject(userCookie.value.urlDecode())
val accessToken = jo.getStringOrNull("access_token") ?: return baseHeaders
return baseHeaders.newBuilder().add("authorization", "bearer $accessToken").build()
}
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
val url = buildString {
append("https://")
append(domain)
when (filter) {
is MangaListFilter.Search -> {
throw IllegalArgumentException(ErrorMessages.SEARCH_NOT_SUPPORTED) // TODO
val searchTitle = filter.query.trim()
if (searchCache.isNotEmpty()) {
if (page > 1) {
return emptyList()
}
return searchCache.filter { it.title.contains(searchTitle, ignoreCase = true) }
} else {
return searchAllPage(page, searchTitle)
}
}
is MangaListFilter.Advanced -> {
@ -53,13 +91,46 @@ internal class ReaperComics(context: MangaLoaderContext) :
}
}
}
return parseMangaList(webClient.httpGet(url).parseHtml())
}
private fun parseMangaList(docs: Document): List<Manga> {
/**
 * Crawls every listing page once and stores all parsed entries in [searchCache];
 * subsequent searches are then served from that cache by the caller.
 *
 * @param page the page to start crawling from
 * @param searchTitle the title to filter by (case-insensitive substring match)
 * @return all cached manga whose title contains [searchTitle]
 */
private suspend fun searchAllPage(page: Int, searchTitle: String): List<Manga> {
    var currentPage = page
    val url = buildString {
        append("https://")
        append(domain)
        append("/comics?page=")
    }
    while (true) {
        try {
            val entries = parseMangaList(webClient.httpGet(url + currentPage).parseHtml())
            if (entries.isEmpty()) {
                break
            }
            searchCache.addAll(entries)
            currentPage++
        } catch (e: java.util.concurrent.CancellationException) {
            // Coroutine cancellation must never be swallowed by the retry/best-effort logic.
            throw e
        } catch (e: Exception) {
            // Best-effort crawl: stop paginating on the first failing page and
            // fall through to return whatever has been cached so far.
            break
        }
    }
    return searchCache.filter { it.title.contains(searchTitle, ignoreCase = true) }.toList()
}
/**
 * Parses the manga entries out of a listing/search results document.
 *
 * @param docs the HTML document to parse
 * @return the list of manga found in the document
 */
private fun parseMangaList(docs: Document): List<Manga> {
return docs.select("main div.relative, main li.col-span-1").map {
val a = it.selectFirstOrThrow("a")
val url = a.attrAsAbsoluteUrl("href")
@ -82,23 +153,107 @@ internal class ReaperComics(context: MangaLoaderContext) :
override suspend fun getAvailableTags(): Set<MangaTag> = emptySet()
companion object {
private val JSON_MEDIA_TYPE = "application/json; charset=utf-8".toMediaType()
}
private fun chapterListNextPageSelector(): String = "button[wire:click*=nextPage]"
private fun chapterListSelector() = "div[wire:id] > div > ul[role=list] > li"
override suspend fun getDetails(manga: Manga): Manga {
val cachedChapters = chapterCache[manga.url]
if (cachedChapters != null) {
return cachedChapters
}
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val simpleDateFormat = SimpleDateFormat("dd/MM/yyyy", sourceLocale)
var totalChapters = (doc.selectFirst(selectTotalChapter)?.text()?.toIntOrNull() ?: 0) - 1
val chapters = mutableSetOf<MangaChapter>()
var hasNextPage = doc.selectFirst(chapterListNextPageSelector()) != null
chapters.addAll(
doc.select(chapterListSelector()).mapChapters { _, li ->
val a = li.selectFirstOrThrow("a")
val chapterUrl = a.attr("href").toRelativeUrl(domain)
MangaChapter(
id = generateUid(chapterUrl),
name = li.selectFirst("div.truncate p.truncate")?.text().orEmpty(),
number = totalChapters--,
url = chapterUrl,
scanlator = null,
uploadDate = parseChapterDate(
simpleDateFormat,
li.selectFirst("div.truncate div.items-center")?.text(),
),
branch = null,
source = source,
)
},
)
if (!hasNextPage) {
return manga.copy(
description = doc.selectFirst("div.p-4 p.prose")?.html(),
state = when (doc.selectFirst("dl.mt-2 div:contains(Status) dd")?.text()?.lowercase()) {
state = when (doc.selectFirst(selectState)?.text()?.lowercase()) {
"ongoing" -> MangaState.ONGOING
"complete" -> MangaState.FINISHED
else -> null
},
chapters = doc.select("div.p-2 div.pb-4 ul li").mapChapters(reversed = true) { i, li ->
chapters = chapters.reversed(),
)
}
val csrfToken = doc.selectFirst("meta[name=csrf-token]")?.attr("content") ?: error("Couldn't find csrf-token")
val livewareData = doc.selectFirst("div[wire:initial-data*=Models\\\\Comic]")?.attr("wire:initial-data")
?.let { JSONObject(it) } ?: error("Couldn't find LiveWireData")
val routeName =
livewareData.getJSONObject("fingerprint").getStringOrNull("name") ?: error("Couldn't find routeName")
val fingerprint = livewareData.getJSONObject("fingerprint")
var serverMemo = livewareData.getJSONObject("serverMemo")
var pageToQuery = 2
// Javascript: (Math.random() + 1).toString(36).substring(8)
val generateId = { ->
"1.${
Random.nextLong().toString(36)
}".substring(10)
} // Not exactly the same, but results in a 3-5 character string
while (hasNextPage) {
//need to format the payload to the expected response format since org.json.JSONObject are not ordered, and the server seems to care about the order of the keys
val payload = String.format(
responseTemplate,
fingerprint.getString("id"),
fingerprint.getString("path"),
serverMemo.getString("htmlHash"),
pageToQuery - 1,
pageToQuery - 1,
serverMemo.getJSONObject("dataMeta").getJSONObject("models").getJSONObject("comic").getString("id"),
serverMemo.getString("checksum"),
generateId(),
pageToQuery,
).toRequestBody(JSON_MEDIA_TYPE)
val headers = Headers.Builder().add("x-csrf-token", csrfToken).add("x-livewire", "true").build()
val responseData =
makeRequest("https://$domain/livewire/message/$routeName", payload, headers)
// response contains state that we need to preserve
serverMemo = mergeLeft(serverMemo, responseData.serverMemo)
val chaptersHtml = Jsoup.parse(responseData.effects.html, "https://$domain")
chapters.addAll(
chaptersHtml.select(chapterListSelector()).mapChapters { _, li ->
val a = li.selectFirstOrThrow("a")
val chapterUrl = a.attrAsAbsoluteUrl("href").toRelativeUrl(domain)
val chapterUrl = a.attr("href").toRelativeUrl(domain)
MangaChapter(
id = generateUid(chapterUrl),
name = li.selectFirst("div.truncate p.truncate")?.text().orEmpty(),
number = i + 1,
number = totalChapters--,
url = chapterUrl,
scanlator = null,
uploadDate = parseChapterDate(
@ -110,6 +265,67 @@ internal class ReaperComics(context: MangaLoaderContext) :
)
},
)
hasNextPage = chaptersHtml.selectFirst(chapterListNextPageSelector()) != null
pageToQuery++
}
val copy = manga.copy(
description = doc.selectFirst("div.p-4 p.prose")?.html(),
state = when (doc.selectFirst(selectState)?.text()?.lowercase()) {
"ongoing" -> MangaState.ONGOING
"complete" -> MangaState.FINISHED
else -> null
},
chapters = chapters.reversed(),
)
chapterCache[manga.url] = copy
return copy
}
/**
 * POSTs [payload] to the LiveWire [url] and parses the JSON response into
 * a [LiveWireResponseDto] (the rendered HTML effects plus the serverMemo state).
 *
 * Retries failed requests up to [MAX_RETRY_COUNT] times with exponential backoff
 * (2s, 4s, 8s, ...); the last failure is rethrown.
 *
 * @throws Exception the final error once all retries are exhausted
 */
private suspend fun makeRequest(url: String, payload: RequestBody, headers: Headers): LiveWireResponseDto {
    var retryCount = 0
    val baseDelay = 2000L // initial backoff delay in milliseconds, doubled on each retry
    val request = Request.Builder().url(url).post(payload).headers(headers).build()
    while (true) {
        try {
            // OkHttp's execute() is blocking, so run it on the IO dispatcher
            // instead of whatever dispatcher the caller is suspended on.
            val response = withContext(Dispatchers.IO) {
                context.httpClient.newCall(request).execute()
            }.parseJson()
            val effectsJson = response.getJSONObject("effects")
            val serverMemoJson = response.getJSONObject("serverMemo")
            val effects = LiveWireEffectsDto(effectsJson.getString("html"))
            return LiveWireResponseDto(effects, serverMemoJson)
        } catch (e: java.util.concurrent.CancellationException) {
            // Never retry on cancellation — propagate it so the coroutine stops promptly.
            throw e
        } catch (e: Exception) {
            if (++retryCount > MAX_RETRY_COUNT) {
                throw e
            }
            // delay() suspends without blocking a thread; no withContext needed.
            delay(baseDelay shl (retryCount - 1))
        }
    }
}
/**
 * Recursively merges [j2] into [j1] in place.
 *
 * For each key of [j2]: when both sides hold [JSONObject]s they are merged
 * recursively; when [j1]'s value is not a [JSONObject] it is overwritten by
 * [j2]'s value; otherwise ([j1] holds an object but [j2] does not) [j1]'s
 * value is left untouched.
 *
 * @return [j1], mutated with the merged content
 */
private fun mergeLeft(j1: JSONObject, j2: JSONObject): JSONObject {
    for (key in j2.keys()) {
        val current = j1.opt(key)
        val incoming = j2[key]
        when {
            current !is JSONObject -> j1.put(key, incoming)
            incoming is JSONObject -> j1.put(key, mergeLeft(current, incoming))
            // else: j1 has a nested object and j2's value is scalar — keep j1's object
        }
    }
    return j1
}
private fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
@ -147,4 +363,18 @@ internal class ReaperComics(context: MangaLoaderContext) :
)
}
}
// Parsed response of a LiveWire "message" call: rendered effects plus server state.
private class LiveWireResponseDto(
val effects: LiveWireEffectsDto, // rendered output (HTML fragment) from the server
val serverMemo: JSONObject, // server-side state; merged back and echoed on the next request
)
// The "effects" part of a LiveWire response.
private class LiveWireEffectsDto(
val html: String, // HTML fragment to be parsed for the next batch of chapters
)
// IMPORTANT: kept as a single pre-formatted template string because the server
// appears to validate the exact key order of this JSON payload, and
// org.json.JSONObject does not preserve insertion order.
private val responseTemplate =
"""{"fingerprint":{"id":"%s","name":"frontend.comic-chapter-list","locale":"en","path":"%s","method":"GET","v":"acj"},"serverMemo":{"children":[],"errors":[],"htmlHash":"%s","data":{"comic":[],"page":%d,"paginators":{"page":%d}},"dataMeta":{"models":{"comic":{"class":"App\\Models\\Comic","id":"%s","relations":[],"connection":"pgsql","collectionClass":null}}},"checksum":"%s"},"updates":[{"type":"callMethod","payload":{"id":"%s","method":"gotoPage","params":[%d,"page"]}}]}"""
}

@ -15,6 +15,7 @@ import org.koitharu.kotatsu.parsers.util.json.toJSONList
import java.text.SimpleDateFormat
import java.util.*
internal abstract class NepnepParser(
context: MangaLoaderContext,
source: MangaSource,
@ -23,7 +24,8 @@ internal abstract class NepnepParser(
override val configKeyDomain = ConfigKey.Domain(domain)
override val availableSortOrders: Set<SortOrder> = EnumSet.of(SortOrder.ALPHABETICAL)
override val availableSortOrders: Set<SortOrder> =
EnumSet.of(SortOrder.ALPHABETICAL, SortOrder.POPULARITY, SortOrder.UPDATED)
override val availableStates: Set<MangaState> = EnumSet.allOf(MangaState::class.java)
override val isTagsExclusionSupported = true
@ -31,16 +33,18 @@ internal abstract class NepnepParser(
.add("User-Agent", UserAgents.CHROME_DESKTOP)
.build()
override suspend fun getList(offset: Int, filter: MangaListFilter?): List<Manga> {
if (offset > 0) {
return emptyList()
// Lazily fetched and cached /search/ page, shared by getList() and getAvailableTags()
// so the directory document is downloaded at most once.
private val searchDoc = SoftSuspendLazy {
webClient.httpGet("https://$domain/search/").parseHtml()
}
var foundTag = true
var foundTagExclude = true
var foundState = true
// Pairs a parsed Manga with the directory metadata needed for client-side sorting.
data class MangaWithLastUpdate(
val manga: Manga,
val lastUpdate: Long, // "lt" field of the directory JSON; used for SortOrder.UPDATED
val views: String, // "v" field; NOTE(review): kept as a String, so sorting by it is lexicographic — confirm intended
)
val doc = webClient.httpGet("https://$domain/search/").parseHtml()
override suspend fun getList(offset: Int, filter: MangaListFilter?): List<Manga> {
val doc = searchDoc.get()
val json = JSONArray(
doc.selectFirstOrThrow("script:containsData(MainFunction)").data()
.substringAfter("vm.Directory = ")
@ -48,53 +52,46 @@ internal abstract class NepnepParser(
.trim()
.replace(';', ' '),
)
val manga = ArrayList<Manga>(json.length())
val mangaWithLastUpdateList = ArrayList<MangaWithLastUpdate>(json.length())
var sort = false
for (i in 0 until json.length()) {
val m = json.getJSONObject(i)
val href = "/manga/" + m.getString("i")
val imgUrl = "https://temp.compsci88.com/cover/" + m.getString("i") + ".jpg"
val lastUpdate = m.getLong("lt")
val views = m.getString("v")
val viewMonth = m.getString("vm")
when (filter) {
is MangaListFilter.Search -> {
if (m.getString("s").contains(filter.query, ignoreCase = true) || m.getString("al")
.contains(filter.query, ignoreCase = true)
if (m.getString("s").contains(filter.query, ignoreCase = true) || (m.getJSONArray("al")
.length() > 0 && m.getJSONArray("al").getString(0)
.contains(filter.query, ignoreCase = true))
) {
manga.add(
addManga(href, imgUrl, m),
mangaWithLastUpdateList.add(
MangaWithLastUpdate(addManga(href, imgUrl, m), lastUpdate, views),
)
}
}
is MangaListFilter.Advanced -> {
if (filter.tags.isNotEmpty()) {
val tagsJon = m.getJSONArray("g").toString()
filter.tags.forEach {
foundTag = false
if (tagsJon.contains(it.key, ignoreCase = true)) {
foundTag = true
}
}
}
if (filter.tagsExclude.isNotEmpty()) {
val tagsJon = m.getJSONArray("g").toString()
filter.tagsExclude.forEach {
foundTagExclude = false
if (!tagsJon.contains(it.key, ignoreCase = true)) {
foundTagExclude = true
}
}
val tags = filter.tags
val tagsExcluded = filter.tagsExclude
val tagsJson = m.getJSONArray("g").toString()
val tagsMatched =
tags.isEmpty() || tags.all { tag -> tagsJson.contains(tag.key, ignoreCase = true) }
val tagsExcludeMatched = tagsExcluded.isEmpty() || tagsExcluded.none { tag ->
tagsJson.contains(
tag.key,
ignoreCase = true,
)
}
if (filter.states.isNotEmpty()) {
val stateJson = m.getString("ps")
filter.states.oneOrThrowIfMany().let {
foundState = false
if (stateJson.contains(
when (it) {
val statesMatched = filter.states.isEmpty() || filter.states.any { state ->
m.getString("ps").contains(
when (state) {
MangaState.ONGOING -> "Ongoing"
MangaState.FINISHED -> "Complete"
MangaState.ABANDONED -> "Cancelled"
@ -103,25 +100,35 @@ internal abstract class NepnepParser(
},
ignoreCase = true,
)
) {
foundState = true
}
}
}
if (tagsMatched && tagsExcludeMatched && statesMatched) {
mangaWithLastUpdateList.add(
MangaWithLastUpdate(addManga(href, imgUrl, m), lastUpdate, views),
)
if (foundTag && foundState && foundTagExclude) {
manga.add(addManga(href, imgUrl, m))
}
sort = true
}
null -> {
manga.add(
addManga(href, imgUrl, m),
mangaWithLastUpdateList.add(
MangaWithLastUpdate(addManga(href, imgUrl, m), lastUpdate, views),
)
}
}
}
return manga
if (sort) {
when (filter?.sortOrder) {
SortOrder.POPULARITY -> mangaWithLastUpdateList.sortByDescending { it.views }
SortOrder.UPDATED -> mangaWithLastUpdateList.sortByDescending { it.lastUpdate }
SortOrder.ALPHABETICAL -> {}
else -> if (filter != null) {
throw IllegalArgumentException("Unsupported sort order: ${filter.sortOrder}")
}
}
}
return mangaWithLastUpdateList.map { it.manga }
.subList(offset, (offset + 30).coerceAtMost(mangaWithLastUpdateList.size))
}
private fun addManga(href: String, imgUrl: String, m: JSONObject): Manga {
@ -141,8 +148,9 @@ internal abstract class NepnepParser(
)
}
override suspend fun getAvailableTags(): Set<MangaTag> {
val doc = webClient.httpGet("https://$domain/search/").parseHtml()
val doc = searchDoc.get()
val tags = doc.selectFirstOrThrow("script:containsData(vm.AvailableFilters)").data()
.substringAfter("\"Genre\"")
.substringAfter('[')

@ -4,6 +4,7 @@ import kotlinx.coroutines.flow.channelFlow
import kotlinx.coroutines.flow.first
import kotlinx.coroutines.launch
import okhttp3.Headers
import okhttp3.HttpUrl
import okhttp3.HttpUrl.Companion.toHttpUrl
import okhttp3.HttpUrl.Companion.toHttpUrlOrNull
import okhttp3.Interceptor
@ -112,7 +113,8 @@ internal abstract class GroupleParser(
}
override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).checkAuthRequired().parseHtml()
val response = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).checkAuthRequired()
val doc = response.parseHtml()
val root = doc.body().requireElementById("mangaBox").selectFirstOrThrow("div.leftContent")
val dateFormat = SimpleDateFormat("dd.MM.yy", Locale.US)
val coverImg = root.selectFirst("div.subject-cover")?.selectFirst("img")
@ -125,7 +127,9 @@ internal abstract class GroupleParser(
} else {
null
}
val newSource = getSource(response.request.url)
return manga.copy(
source = newSource,
description = root.selectFirst("div.manga-description")?.html(),
largeCoverUrl = coverImg?.attr("data-full"),
coverUrl = coverImg?.attr("data-thumb") ?: manga.coverUrl,
@ -160,7 +164,7 @@ internal abstract class GroupleParser(
url = href,
uploadDate = dateFormat.tryParse(tr.selectFirst("td.date")?.text()),
scanlator = translators,
source = source,
source = newSource,
branch = null,
),
)
@ -177,7 +181,7 @@ internal abstract class GroupleParser(
url = link,
uploadDate = dateFormat.tryParse(jo.getStringOrNull("dateCreated")),
scanlator = null,
source = source,
source = newSource,
branch = translations[personId],
)
}
@ -187,7 +191,11 @@ internal abstract class GroupleParser(
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain) + "?mtr=1").checkAuthRequired().parseHtml()
if (chapter.source != source) { // handle redirects between websites
return context.newParserInstance(chapter.source).getPages(chapter)
}
val url = chapter.url.toAbsoluteUrl(domain).toHttpUrl().newBuilder().setQueryParameter("mtr", "1").build()
val doc = webClient.httpGet(url).checkAuthRequired().parseHtml()
val scripts = doc.select("script")
for (script in scripts) {
val data = script.html()
@ -292,6 +300,14 @@ internal abstract class GroupleParser(
return root.select("div.tile").mapNotNull(::parseManga)
}
// Maps the final (possibly post-redirect) response host to the matching
// Grouple-family source, falling back to this parser's own source for unknown hosts.
protected open fun getSource(url: HttpUrl): MangaSource = when (url.host) {
in SeiMangaParser.domains -> MangaSource.SEIMANGA
in MintMangaParser.domains -> MangaSource.MINTMANGA
in ReadmangaParser.domains -> MangaSource.READMANGA_RU
in SelfMangaParser.domains -> MangaSource.SELFMANGA
else -> source
}
private fun getSortKey(sortOrder: SortOrder) = when (sortOrder) {
SortOrder.ALPHABETICAL -> "name"
SortOrder.POPULARITY -> "rate"

@ -10,9 +10,14 @@ internal class MintMangaParser(
context: MangaLoaderContext,
) : GroupleParser(context, MangaSource.MINTMANGA, 2) {
override val configKeyDomain = ConfigKey.Domain(
override val configKeyDomain = ConfigKey.Domain(*domains)
companion object {
val domains = arrayOf(
"24.mintmanga.one",
"mintmanga.live",
"mintmanga.com",
)
}
}

@ -10,9 +10,14 @@ internal class ReadmangaParser(
context: MangaLoaderContext,
) : GroupleParser(context, MangaSource.READMANGA_RU, 1) {
override val configKeyDomain = ConfigKey.Domain(
override val configKeyDomain = ConfigKey.Domain(*domains)
companion object {
val domains = arrayOf(
"readmanga.live",
"readmanga.io",
"readmanga.me",
)
}
}

@ -0,0 +1,21 @@
package org.koitharu.kotatsu.parsers.site.ru.grouple
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.model.MangaSource
// Parser for seimanga.me, a Grouple-engine site (shares all logic with GroupleParser).
@MangaSourceParser("SEIMANGA", "SeiManga", "ru")
internal class SeiMangaParser(
context: MangaLoaderContext,
) : GroupleParser(context, MangaSource.SEIMANGA, 21) {
// All known domains for this source; exposed so GroupleParser.getSource()
// can map redirect hosts back to this source.
override val configKeyDomain = ConfigKey.Domain(*domains)
companion object {
val domains = arrayOf(
"seimanga.me",
)
}
}

@ -11,6 +11,12 @@ internal class SelfMangaParser(
context: MangaLoaderContext,
) : GroupleParser(context, MangaSource.SELFMANGA, 3) {
override val configKeyDomain = ConfigKey.Domain("selfmanga.live")
override val configKeyDomain = ConfigKey.Domain(*domains)
companion object {
val domains = arrayOf(
"selfmanga.live",
)
}
}

Loading…
Cancel
Save