commit
32eab42c26
@ -0,0 +1,6 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.model
|
||||||
|
|
||||||
|
/**
 * Minimal contract for a manga source.
 * Implementations (see the generated parser sources) identify themselves by [name].
 */
interface MangaSource {

	/** Identifier of this source; the only member this interface requires. */
	val name: String
}
|
||||||
@ -0,0 +1,10 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.cupfox.de
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaParserSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.cupfox.CupFoxParser
|
||||||
|
|
||||||
|
/** German ("de") source at www.mangahaus.com; all parsing logic is inherited from [CupFoxParser]. */
@MangaSourceParser("MANGAHAUS", "MangaHaus", "de")
internal class MangaHaus(context: MangaLoaderContext) :
	CupFoxParser(context, MangaParserSource.MANGAHAUS, "www.mangahaus.com")
|
||||||
@ -0,0 +1,10 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.cupfox.fr
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaParserSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.cupfox.CupFoxParser
|
||||||
|
|
||||||
|
/** French ("fr") source at www.enlignemanga.com; all parsing logic is inherited from [CupFoxParser]. */
@MangaSourceParser("ENLIGNEMANGA", "EnLigneManga", "fr")
internal class EnLigneManga(context: MangaLoaderContext) :
	CupFoxParser(context, MangaParserSource.ENLIGNEMANGA, "www.enlignemanga.com")
|
||||||
@ -0,0 +1,10 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.cupfox.fr
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaParserSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.cupfox.CupFoxParser
|
||||||
|
|
||||||
|
/** French ("fr") source at www.frmanga.com; all parsing logic is inherited from [CupFoxParser]. */
@MangaSourceParser("FRMANGA", "FrManga", "fr")
internal class FrManga(context: MangaLoaderContext) :
	CupFoxParser(context, MangaParserSource.FRMANGA, "www.frmanga.com")
|
||||||
@ -0,0 +1,10 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.cupfox.fr
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaParserSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.cupfox.CupFoxParser
|
||||||
|
|
||||||
|
/** French ("fr") source at www.seinemanga.com; all parsing logic is inherited from [CupFoxParser]. */
@MangaSourceParser("SEINEMANGA", "SeineManga", "fr")
internal class SeineManga(context: MangaLoaderContext) :
	CupFoxParser(context, MangaParserSource.SEINEMANGA, "www.seinemanga.com")
|
||||||
@ -0,0 +1,10 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.cupfox.ja
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaParserSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.cupfox.CupFoxParser
|
||||||
|
|
||||||
|
/** Japanese ("ja") source at www.mangakoinu.com; all parsing logic is inherited from [CupFoxParser]. */
@MangaSourceParser("MANGAKOINU", "MangaKoinu", "ja")
internal class MangaKoinu(context: MangaLoaderContext) :
	CupFoxParser(context, MangaParserSource.MANGAKOINU, "www.mangakoinu.com")
|
||||||
@ -0,0 +1,10 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.cupfox.vi
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaParserSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.cupfox.CupFoxParser
|
||||||
|
|
||||||
|
/** Vietnamese ("vi") source at oioivn.com; all parsing logic is inherited from [CupFoxParser]. */
@MangaSourceParser("OIOIVN", "OioiVn", "vi")
internal class OioiVn(context: MangaLoaderContext) :
	CupFoxParser(context, MangaParserSource.OIOIVN, "oioivn.com")
|
||||||
@ -0,0 +1,177 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.en
|
||||||
|
|
||||||
|
import androidx.collection.ArrayMap
|
||||||
|
import kotlinx.coroutines.sync.Mutex
|
||||||
|
import kotlinx.coroutines.sync.withLock
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||||
|
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||||
|
import org.koitharu.kotatsu.parsers.model.*
|
||||||
|
import org.koitharu.kotatsu.parsers.util.*
|
||||||
|
import org.koitharu.kotatsu.parsers.util.json.toJSONList
|
||||||
|
import java.text.SimpleDateFormat
|
||||||
|
import java.util.*
|
||||||
|
|
||||||
|
/**
 * Parser for AsuraComic (asuracomic.net), an English source.
 *
 * Listings come from the `/series` HTML page (30 items per page); the genre
 * filter list comes from a JSON endpoint on the `gg.` subdomain and is cached
 * in memory behind a [Mutex].
 */
@MangaSourceParser("ASURASCANS", "AsuraComic", "en")
internal class AsuraScansParser(context: MangaLoaderContext) :
	PagedMangaParser(context, MangaParserSource.ASURASCANS, pageSize = 30) {

	override val availableSortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.RATING,
		SortOrder.UPDATED,
		SortOrder.NEWEST,
		SortOrder.ALPHABETICAL_DESC,
		SortOrder.ALPHABETICAL,
	)

	// The site exposes a numeric code for every MangaState (see the mapping below).
	override val availableStates: Set<MangaState> = EnumSet.allOf(MangaState::class.java)

	override val configKeyDomain = ConfigKey.Domain("asuracomic.net")

	override val isMultipleTagsSupported = true

	/**
	 * Fetches one listing page, translating [filter] into the site's query parameters
	 * (`name`, `genres`, `status`, `types`, `order`).
	 */
	override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
		val url = buildString {
			append("https://")
			append(domain)
			append("/series?page=")
			append(page)

			when (filter) {
				is MangaListFilter.Search -> {
					append("&name=")
					append(filter.query.urlEncoded())
				}

				is MangaListFilter.Advanced -> {

					if (filter.tags.isNotEmpty()) {
						// Tag keys are the numeric genre ids fetched in getOrCreateTagMap().
						append("&genres=")
						append(filter.tags.joinToString(separator = ",") { it.key })
					}

					// Only one status value may be sent per query.
					filter.states.oneOrThrowIfMany()?.let {
						append("&status=")
						append(
							// Site-specific numeric codes; exhaustive over MangaState.
							when (it) {
								MangaState.ONGOING -> "1"
								MangaState.FINISHED -> "3"
								MangaState.ABANDONED -> "4"
								MangaState.PAUSED -> "2"
								MangaState.UPCOMING -> "6"
							},
						)
					}

					append("&types=-1&order=")
					when (filter.sortOrder) {
						SortOrder.RATING -> append("rating")
						SortOrder.UPDATED -> append("update")
						SortOrder.NEWEST -> append("latest")
						SortOrder.ALPHABETICAL_DESC -> append("desc")
						SortOrder.ALPHABETICAL -> append("asc")
						else -> append("update")
					}
				}

				// No filter: default query string (all genres/statuses, sorted by update).
				null -> append("&genres=&status=-1&order=update&types=-1")
			}
		}
		val doc = webClient.httpGet(url).parseHtml()
		return doc.select("div.grid > a[href]").map { a ->
			// NOTE(review): a leading "/" is prepended manually — presumably
			// attrAsRelativeUrl returns the path without one here; verify.
			val href = "/" + a.attrAsRelativeUrl("href")
			Manga(
				id = generateUid(href),
				url = href,
				publicUrl = href.toAbsoluteUrl(domain),
				coverUrl = a.selectFirst("img")?.src().orEmpty(),
				title = a.selectFirst("div.block > span.block")?.text().orEmpty(),
				altTitle = null,
				// Site rating is on a 0..10 scale; normalized to 0..1.
				rating = a.selectFirst("div.block label.ml-1")?.text()?.toFloatOrNull()?.div(10f) ?: RATING_UNKNOWN,
				tags = emptySet(),
				author = null,
				state = when (a.selectLastOrThrow("span.status").text()) {
					"Ongoing" -> MangaState.ONGOING
					"Completed" -> MangaState.FINISHED
					"Hiatus" -> MangaState.PAUSED
					"Dropped" -> MangaState.ABANDONED
					"Coming Soon" -> MangaState.UPCOMING
					else -> null
				},
				source = source,
				isNsfw = isNsfwSource,
			)
		}
	}

	// Lazily-built cache of site genre name -> MangaTag, guarded by [mutex].
	private var tagCache: ArrayMap<String, MangaTag>? = null
	private val mutex = Mutex()

	override suspend fun getAvailableTags(): Set<MangaTag> {
		return getOrCreateTagMap().values.toSet()
	}

	/**
	 * Returns the genre-name -> tag map, fetching it once from the JSON filters
	 * endpoint. The lock makes concurrent callers share a single fetch.
	 */
	private suspend fun getOrCreateTagMap(): Map<String, MangaTag> = mutex.withLock {
		tagCache?.let { return@withLock it }
		val tagMap = ArrayMap<String, MangaTag>()
		val json =
			webClient.httpGet("https://gg.$domain/api/series/filters").parseJson().getJSONArray("genres").toJSONList()
		for (el in json) {
			// Skip unnamed placeholder entries.
			if (el.getString("name").isEmpty()) continue
			tagMap[el.getString("name")] = MangaTag(
				key = el.getInt("id").toString(),
				title = el.getString("name"),
				source = source,
			)
		}
		tagCache = tagMap
		return@withLock tagMap
	}

	// Strips English ordinal suffixes ("1st", "2nd", …) so dates parse as plain numbers.
	private val regexDate = """(\d+)(st|nd|rd|th)""".toRegex()

	/**
	 * Loads description, tags, author, and the chapter list from the manga page.
	 * Tags are matched back to the cached tag map by their displayed text.
	 */
	override suspend fun getDetails(manga: Manga): Manga {
		val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
		val tagMap = getOrCreateTagMap()
		val selectTag = doc.select("div[class^=space] > div.flex > button.text-white")
		val tags = selectTag.mapNotNullToSet { tagMap[it.text()] }
		return manga.copy(
			description = doc.selectFirst("span.font-medium.text-sm")?.text().orEmpty(),
			tags = tags,
			author = doc.selectFirst("div.grid > div:has(h3:eq(0):containsOwn(Author)) > h3:eq(1)")?.text(),
			chapters = doc.select("div.scrollbar-thumb-themecolor > div.group").mapChapters(reversed = true) { i, div ->
				val a = div.selectLastOrThrow("a")
				// Chapter hrefs are relative to the series; the "/series/" prefix is restored.
				val urlRelative = "/series/" + a.attrAsRelativeUrl("href")
				val url = urlRelative.toAbsoluteUrl(domain)
				val date = div.selectFirst("h3:eq(1)")!!.ownText()
				// e.g. "January 3rd 2024" -> "January 3 2024"
				val cleanDate = date.replace(regexDate, "$1")
				MangaChapter(
					id = generateUid(url),
					name = div.selectFirstOrThrow("h3:eq(0)").text(),
					number = i + 1f,
					volume = 0,
					url = url,
					scanlator = null,
					uploadDate = SimpleDateFormat("MMMM d yyyy", Locale.US)
						.tryParse(cleanDate),
					branch = null,
					source = source,
				)
			},
		)
	}

	/** Collects page images from the chapter page (`img[alt=chapter]` elements). */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()
		return doc.select("div > img[alt=chapter]").map { img ->
			val urlPage = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(urlPage),
				url = urlPage,
				preview = null,
				source = source,
			)
		}
	}
}
|
||||||
@ -1,10 +1,12 @@
|
|||||||
package org.koitharu.kotatsu.parsers.site.foolslide.en
|
package org.koitharu.kotatsu.parsers.site.foolslide.en
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.Broken
|
||||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
import org.koitharu.kotatsu.parsers.model.MangaParserSource
|
||||||
import org.koitharu.kotatsu.parsers.site.foolslide.FoolSlideParser
|
import org.koitharu.kotatsu.parsers.site.foolslide.FoolSlideParser
|
||||||
|
|
||||||
|
/** English FoolSlide-based source; annotated [Broken] — currently non-functional. */
@Broken
@MangaSourceParser("READER_EVILFLOWERS", "Evil Flowers", "en")
internal class ReaderEvilflowers(context: MangaLoaderContext) :
	FoolSlideParser(context, MangaParserSource.READER_EVILFLOWERS, "reader.evilflowers.com")
|
||||||
|
|||||||
@ -1,12 +1,14 @@
|
|||||||
package org.koitharu.kotatsu.parsers.site.foolslide.it
|
package org.koitharu.kotatsu.parsers.site.foolslide.it
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.Broken
|
||||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
import org.koitharu.kotatsu.parsers.model.MangaParserSource
|
||||||
import org.koitharu.kotatsu.parsers.site.foolslide.FoolSlideParser
|
import org.koitharu.kotatsu.parsers.site.foolslide.FoolSlideParser
|
||||||
|
|
||||||
|
/** Italian FoolSlide-based source; annotated [Broken] — currently non-functional. */
@Broken
@MangaSourceParser("POWERMANGA", "PowerManga", "it")
internal class PowerManga(context: MangaLoaderContext) :
	FoolSlideParser(context, MangaParserSource.POWERMANGA, "reader.powermanga.org") {

	// Pagination is disabled for this source.
	override val pagination = false
}
|
||||||
|
|||||||
@ -0,0 +1,336 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.fr
|
||||||
|
|
||||||
|
import kotlinx.coroutines.Dispatchers
|
||||||
|
import kotlinx.coroutines.async
|
||||||
|
import kotlinx.coroutines.awaitAll
|
||||||
|
import kotlinx.coroutines.coroutineScope
|
||||||
|
import kotlinx.coroutines.delay
|
||||||
|
import kotlinx.coroutines.withContext
|
||||||
|
import okhttp3.Headers
|
||||||
|
import okhttp3.Request
|
||||||
|
import okhttp3.RequestBody
|
||||||
|
import okhttp3.RequestBody.Companion.toRequestBody
|
||||||
|
import org.json.JSONArray
|
||||||
|
import org.jsoup.Jsoup
|
||||||
|
import org.jsoup.nodes.Document
|
||||||
|
import org.jsoup.nodes.Element
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||||
|
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||||
|
import org.koitharu.kotatsu.parsers.model.*
|
||||||
|
import org.koitharu.kotatsu.parsers.util.*
|
||||||
|
import org.koitharu.kotatsu.parsers.util.json.getIntOrDefault
|
||||||
|
import org.koitharu.kotatsu.parsers.util.json.mapJSON
|
||||||
|
import java.text.SimpleDateFormat
|
||||||
|
import java.util.*
|
||||||
|
|
||||||
|
/**
 * Parser for MangaMana (www.manga-mana.com), a French source.
 *
 * Listings are fetched via a CSRF-protected POST whose JSON response carries the
 * rendered HTML; details, chapters and pages come from plain GET requests.
 *
 * Review fixes vs. the original: scraped numbers are parsed with
 * `toFloatOrNull()`/`toIntOrNull()` instead of the throwing variants (a single
 * malformed value no longer crashes a whole listing), the search query is
 * URL-encoded, the bitwise `or` on booleans is replaced with short-circuit `||`,
 * and the chapter-number regex is hoisted out of the per-chapter loop.
 */
@MangaSourceParser("MANGAMANA", "MangaMana", "fr")
internal class MangaMana(context: MangaLoaderContext) : PagedMangaParser(context, MangaParserSource.MANGAMANA, 25) {

	override val availableSortOrders: Set<SortOrder> =
		EnumSet.of(
			SortOrder.UPDATED,
			SortOrder.RATING,
			SortOrder.ALPHABETICAL,
			SortOrder.ALPHABETICAL_DESC,
			SortOrder.NEWEST,
		)

	override val availableStates: Set<MangaState> =
		EnumSet.of(MangaState.ONGOING, MangaState.FINISHED, MangaState.ABANDONED)

	override val configKeyDomain = ConfigKey.Domain("www.manga-mana.com")

	override val isMultipleTagsSupported = false

	/**
	 * Fetches one listing page. Search queries and UPDATED-sorted listings short-circuit
	 * with their own endpoints; all other filters are POSTed to `/liste-mangas`.
	 */
	override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {

		val postData = buildString {
			append("page=")
			append(page)
			when (filter) {
				is MangaListFilter.Search -> {
					// The live-search endpoint is not paginated.
					if (page > 1) {
						return emptyList()
					}
					// Covers are hosted on a cdn.* mirror of the main domain.
					val domainCdn = "cdn" + domain.removePrefix("www")
					// urlEncoded(): the raw query may contain spaces/special characters.
					val json =
						webClient.httpGet("https://$domain/search-live?q=${filter.query.urlEncoded()}").parseJsonArray()
					return json.mapJSON { jo ->
						// getString() throws when the key is missing, so no null check is needed.
						val slug = jo.getString("slug")
						val url = "https://$domain/m/$slug"
						val img = "https://$domainCdn/uploads/manga/$slug/cover/cover_thumb.jpg"
						Manga(
							id = generateUid(url),
							title = jo.getString("name").orEmpty(),
							coverUrl = img,
							altTitle = jo.getString("otherNames").orEmpty(),
							author = null,
							// "caution" == 2 marks adult content; any other value is treated as safe.
							isNsfw = jo.getIntOrDefault("caution", 0) == 2,
							rating = RATING_UNKNOWN,
							url = url,
							description = jo.getString("summary_old").orEmpty(),
							publicUrl = url,
							tags = emptySet(),
							state = when (jo.getIntOrDefault("status_id_fr", 4)) {
								1 -> MangaState.ONGOING
								2 -> MangaState.FINISHED
								3 -> MangaState.ABANDONED
								else -> null
							},
							source = source,
						)
					}
				}

				is MangaListFilter.Advanced -> {

					if (filter.sortOrder == SortOrder.UPDATED) {

						// `||` (short-circuit) instead of the original bitwise `or`.
						if (filter.tags.isNotEmpty() || filter.states.isNotEmpty()) {
							throw IllegalArgumentException("Le filtrage par « tri par : mis à jour » avec les genres ou les statuts n'est pas pris en charge par cette source.")
						}

						// "Sort by updated" is only available on the home page listing.
						val doc = webClient.httpGet("https://$domain/?page=$page").parseHtml()
						return doc.select("div.row div.col_home").map { div ->
							val href = div.selectFirstOrThrow("h4 a").attrAsRelativeUrl("href")
							// Adult covers are delivered through the data-adult attribute instead of data-src.
							val isNsfw = div.selectFirst("img[data-adult]")?.attr("data-adult")?.isNotEmpty() ?: false
							val img = if (isNsfw) {
								div.selectFirst("img")?.attr("data-adult")
							} else {
								div.selectFirst("img")?.attr("data-src")?.replace(" ", "")
							}
							Manga(
								id = generateUid(href),
								title = div.select("h4").text(),
								altTitle = null,
								url = href,
								publicUrl = href.toAbsoluteUrl(domain),
								rating = RATING_UNKNOWN,
								isNsfw = isNsfw,
								coverUrl = img.orEmpty(),
								description = null,
								tags = emptySet(),
								state = null,
								author = null,
								source = source,
							)
						}
					} else {
						filter.tags.oneOrThrowIfMany()?.let {
							append("&category=")
							append(it.key)
						}

						filter.states.oneOrThrowIfMany()?.let {
							append("&status=")
							append(
								// Site-specific numeric status codes.
								when (it) {
									MangaState.ONGOING -> "1"
									MangaState.FINISHED -> "2"
									MangaState.ABANDONED -> "3"
									else -> ""
								},
							)
						}

						append("&sort_by=")
						when (filter.sortOrder) {
							SortOrder.RATING -> append("score&sort_dir=desc")
							SortOrder.NEWEST -> append("updated_at&sort_dir=desc")
							SortOrder.ALPHABETICAL -> append("name&sort_dir=asc")
							SortOrder.ALPHABETICAL_DESC -> append("name&sort_dir=desc")
							else -> append("updated_at&sort_dir=desc")
						}
					}
				}

				null -> append("&sort_by=updated_at&sort_dir=desc")
			}
		}

		// The listing endpoint requires a CSRF token scraped from the page itself.
		val url = "https://$domain/liste-mangas"
		val token = webClient.httpGet(url).parseHtml().selectFirstOrThrow("meta[name=csrf-token]").attr("content")
		val headers = Headers.Builder().add("X-CSRF-TOKEN", token).add("X-Requested-With", "XMLHttpRequest")
			.add("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8").build()
		val doc = makeRequest(url, postData.toRequestBody(), headers)

		return doc.select("div.p-2 div.col").map { div ->
			val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
			val isNsfw = div.selectFirst("img[data-adult]")?.attr("data-adult")?.isNotEmpty() ?: false
			val img = if (isNsfw) {
				div.selectFirst("img")?.attr("data-adult")
			} else {
				div.selectFirst("img")?.attr("data-src")?.replace(" ", "")
			}
			Manga(
				id = generateUid(href),
				title = div.select("h2.fs-6").text(),
				altTitle = doc.selectFirst(".mangalist_item_othernames")?.text().orEmpty(),
				url = href,
				publicUrl = href.toAbsoluteUrl(domain),
				// toFloatOrNull(): a non-numeric rating must not crash the whole listing.
				rating = div.getElementById("avgrating")?.ownText()?.toFloatOrNull()?.div(5f) ?: RATING_UNKNOWN,
				isNsfw = isNsfw,
				coverUrl = img.orEmpty(),
				description = div.selectFirst(".mangalist_item_description")?.text().orEmpty(),
				tags = div.select("div.mb-1 a").mapNotNullToSet {
					val key = it.attr("href").substringAfterLast('=')
					MangaTag(
						key = key,
						title = it.text(),
						source = source,
					)
				},
				state = null,
				author = null,
				source = source,
			)
		}
	}

	/**
	 * POSTs [payload] to [url] with [headers], retrying up to 5 times with a fixed
	 * 2-second delay. The endpoint answers JSON whose "html" field holds the
	 * rendered listing, which is parsed into a [Document].
	 */
	private suspend fun makeRequest(url: String, payload: RequestBody, headers: Headers): Document {
		var retryCount = 0
		val backoffDelay = 2000L // delay between retries (milliseconds)
		val request = Request.Builder().url(url).post(payload).headers(headers).build()
		while (true) {
			try {
				// NOTE(review): execute() is a blocking OkHttp call inside a suspend
				// function — consider a suspending call or Dispatchers.IO; kept as-is
				// to preserve the existing behavior.
				return Jsoup.parse(context.httpClient.newCall(request).execute().parseJson().getString("html"))
			} catch (e: Exception) {
				if (++retryCount <= 5) {
					// delay() suspends without blocking; no dispatcher switch needed.
					delay(backoffDelay)
				} else {
					throw e
				}
			}
		}
	}

	/**
	 * Loads state, author, description, rating, tags, and the full (possibly
	 * multi-page) chapter list; extra chapter pages are fetched concurrently.
	 */
	override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
		val mangaUrl = manga.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(mangaUrl).parseHtml()
		// Highest chapter-list page number linked from the pagination bar (1 when absent).
		// toIntOrNull(): a malformed href must not abort the whole details request.
		val maxPageChapter = doc.select("ul.pagination a.page-link")
			.mapNotNull { it.attr("href").substringAfterLast('=').toIntOrNull() }
			.maxOrNull() ?: 1
		manga.copy(
			state = when (doc.select("div.show_details div.d-flex:contains(Statut) span").text()) {
				"En Cours" -> MangaState.ONGOING
				"Terminé" -> MangaState.FINISHED
				"Abandonné" -> MangaState.ABANDONED
				else -> null
			},
			author = doc.selectFirst("div.show_details span[itemprop=author]")?.text().orEmpty(),
			description = doc.selectFirst("dd[itemprop=description]")?.text(),
			// toFloatOrNull(): see the listing rating above.
			rating = doc.getElementById("avgrating")?.ownText()?.toFloatOrNull()?.div(5f) ?: RATING_UNKNOWN,
			tags = doc.select("ul.list-unstyled li a.category").mapNotNullToSet {
				val key = it.attr("href").substringAfterLast('=')
				MangaTag(
					key = key,
					title = it.text(),
					source = source,
				)
			},
			chapters = run {
				if (maxPageChapter == 1) {
					parseChapters(doc)
				} else {
					// Fetch pages 2..max concurrently and append to the first page's chapters.
					coroutineScope {
						val result = ArrayList(parseChapters(doc))
						result.ensureCapacity(result.size * maxPageChapter)
						(2..maxPageChapter).map { i ->
							async {
								loadChapters(mangaUrl, i)
							}
						}.awaitAll()
							.flattenTo(result)
						result
					}
				}
			}.reversed(),
		)
	}

	/** Fetches and parses one extra page of the chapter list. */
	private suspend fun loadChapters(baseUrl: String, page: Int): List<MangaChapter> {
		return parseChapters(webClient.httpGet("$baseUrl?page=$page").parseHtml().body())
	}

	private val dateFormat = SimpleDateFormat("d MMM yyyy", sourceLocale)

	// Hoisted out of parseChapters: matches every character that is not a digit or a dot.
	private val regexNotChapterNumber = "[^0-9.]".toRegex()

	/** Parses the chapter links present in [doc] into [MangaChapter] entries. */
	private fun parseChapters(doc: Element): List<MangaChapter> {
		return doc.select("ul.list-unstyled li a.chapter_link")
			.mapChapters { i, a ->
				val href = a.attrAsRelativeUrl("href")
				val name = a.selectFirst(".chapter div")?.html()?.substringBefore("<") ?: "Chapitre $i"
				val dateText = a.selectFirst(".small")?.text()
				// Chapter number: strip everything but digits/dots from the URL slug;
				// fall back to the list position when the result is not parseable
				// (the original toFloat() threw NumberFormatException here).
				val chapterN = href.substringAfterLast('/').replace("-", ".").replace(regexNotChapterNumber, "")
					.toFloatOrNull() ?: (i + 1f)
				MangaChapter(
					id = generateUid(href),
					name = name,
					number = chapterN,
					volume = 0,
					url = href,
					scanlator = null,
					uploadDate = dateFormat.tryParse(dateText),
					branch = null,
					source = source,
				)
			}
	}

	/**
	 * Builds page image URLs from two inline scripts on the chapter page:
	 * one declaring the CDN prefix (`var cdn = "..."`) and one holding the
	 * page list as a JSON array (`var pages = [...]`).
	 */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val chapterUrl = chapter.url.toAbsoluteUrl(domain)
		val docs = webClient.httpGet(chapterUrl).parseHtml()

		val cdn = docs.selectFirstOrThrow("script:containsData(var cdn = )").data().substringAfterLast("var cdn = \"")
			.substringBefore('"')
		val domainCdn = cdn + domain.removePrefix("www")
		val slugManga = chapterUrl.substringAfter("/m/").substringBeforeLast('/')
		val slugChapter = chapterUrl.substringAfterLast('/')

		val script = docs.selectFirstOrThrow("script:containsData(var pages =)")
		val json = JSONArray(script.data().substringAfter("pages = ").substringBefore("; var next_chapter"))
		val pages = ArrayList<MangaPage>(json.length())
		for (i in 0 until json.length()) {
			val jo = json.getJSONObject(i)
			val img = jo.getString("image")
			// "version" is appended as a query string (cache-busting).
			val v = jo.getInt("version")
			val url = "https://$domainCdn/uploads/manga/$slugManga/chapters_fr/$slugChapter/$img?$v"
			pages.add(
				MangaPage(
					id = generateUid(url),
					url = url,
					preview = null,
					source = source,
				),
			)
		}
		return pages
	}

	override suspend fun getAvailableTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain/liste-mangas").parseHtml()
		// drop(1): skips the first <option> — presumably an "all categories"
		// placeholder; verify against the live page.
		return doc.select("select.selectpicker option").drop(1).mapNotNullToSet {
			MangaTag(
				key = it.attr("value"),
				title = it.text(),
				source = source,
			)
		}
	}
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue