add sources
parent
7fbeb2e266
commit
55e14e4cb3
@ -0,0 +1,96 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.en
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaParser
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||||
|
import org.koitharu.kotatsu.parsers.model.*
|
||||||
|
import org.koitharu.kotatsu.parsers.util.*
|
||||||
|
import java.text.SimpleDateFormat
|
||||||
|
import java.util.*
|
||||||
|
|
||||||
|
@MangaSourceParser("PO2SCANS", "Po2Scans", "en")
internal class Po2Scans(context: MangaLoaderContext) : MangaParser(context, MangaSource.PO2SCANS) {

	override val sortOrders: Set<SortOrder> = EnumSet.of(SortOrder.ALPHABETICAL)
	override val configKeyDomain = ConfigKey.Domain("po2scans.com")

	/**
	 * Fetches the catalogue. The site exposes a single, unpaginated series
	 * list, so any offset past the first page yields no results.
	 */
	override suspend fun getList(offset: Int, query: String?, tags: Set<MangaTag>?, sortOrder: SortOrder): List<Manga> {
		if (offset > 0) {
			return emptyList()
		}
		val url = buildString {
			append("https://$domain/series")
			if (!query.isNullOrEmpty()) {
				append("?search=")
				append(query.urlEncoded())
			}
		}
		val doc = webClient.httpGet(url).parseHtml()
		return doc.select(".series-list").map { div ->
			val href = div.selectFirstOrThrow("a").attrAsAbsoluteUrl("href")
			Manga(
				id = generateUid(href),
				title = div.selectFirstOrThrow("h2").text(),
				altTitle = null,
				url = href,
				publicUrl = href.toAbsoluteUrl(domain),
				rating = RATING_UNKNOWN,
				isNsfw = false,
				coverUrl = div.selectFirstOrThrow("img").src()?.toAbsoluteUrl(domain).orEmpty(),
				tags = emptySet(),
				state = null,
				author = null,
				source = source,
			)
		}
	}

	// The site offers no genre taxonomy.
	override suspend fun getTags(): Set<MangaTag> = emptySet()

	/**
	 * Loads details and the chapter list from the series page.
	 * Chapter dates are rendered like "12 Jan, 23".
	 */
	override suspend fun getDetails(manga: Manga): Manga {
		val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
		val dateFormat = SimpleDateFormat("dd MMM, yy", Locale.ENGLISH)
		return manga.copy(
			altTitle = null,
			state = when (doc.select(".status span").last()?.text()) {
				"Ongoing" -> MangaState.ONGOING
				"Done" -> MangaState.FINISHED
				else -> null
			},
			tags = emptySet(),
			author = doc.select(".author span").last()?.text(),
			description = doc.selectFirstOrThrow(".summary").text(),
			chapters = doc.select(".chap-section .chap")
				.mapChapters(reversed = true) { i, div ->
					val a = div.selectFirstOrThrow("a")
					// FIX: previously this was "/" + relativeUrl.toAbsoluteUrl(domain),
					// which produced a malformed "/https://…" value and broke the
					// toAbsoluteUrl() call in getPages(). Keep the relative URL.
					val url = a.attrAsRelativeUrl("href")
					MangaChapter(
						id = generateUid(url),
						name = a.text(),
						number = i + 1,
						url = url,
						scanlator = null,
						uploadDate = dateFormat.tryParse(div.select(".detail span").last()?.text()),
						branch = null,
						source = source,
					)
				},
		)
	}

	/**
	 * Extracts page images from the chapter's swiper gallery. Image paths are
	 * relative to the document ("./assets/…"), so they are rebased first.
	 */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		return doc.select(".swiper-slide img").map { img ->
			val url = img.src()?.replace("./assets", "/assets")?.toRelativeUrl(domain)
				?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}
}
|
||||||
@ -0,0 +1,150 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.fr
|
||||||
|
|
||||||
|
import kotlinx.coroutines.coroutineScope
|
||||||
|
import org.json.JSONArray
|
||||||
|
import org.json.JSONObject
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||||
|
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||||
|
import org.koitharu.kotatsu.parsers.model.*
|
||||||
|
import org.koitharu.kotatsu.parsers.util.*
|
||||||
|
import org.koitharu.kotatsu.parsers.util.json.*
|
||||||
|
import java.text.SimpleDateFormat
|
||||||
|
import java.util.*
|
||||||
|
|
||||||
|
@MangaSourceParser("FMTEAM", "FmTeam", "fr")
internal class FmTeam(context: MangaLoaderContext) :
	PagedMangaParser(context, MangaSource.FMTEAM, 0) {

	override val sortOrders: Set<SortOrder> = EnumSet.of(SortOrder.ALPHABETICAL)
	override val configKeyDomain = ConfigKey.Domain("fmteam.fr")

	/**
	 * Fetches the catalogue from the site's JSON API. The API returns the
	 * whole list at once, so only the first page is meaningful.
	 * Note: the search endpoint requires at least 3 letters (site limitation).
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		if (page > 1) { return emptyList() }

		val jsonManga = if (!query.isNullOrEmpty()) {
			// 3 letters minimum
			webClient.httpGet("https://$domain/api/search/${query.urlEncoded()}").parseJson().getJSONArray("comics")
		} else {
			webClient.httpGet("https://$domain/api/comics").parseJson().getJSONArray("comics")
		}

		val manga = ArrayList<Manga>(jsonManga.length())
		for (i in 0 until jsonManga.length()) {
			val j = jsonManga.getJSONObject(i)
			val href = "/api" + j.getString("url")
			// Client-side tag filter: keep an entry only when its "genres"
			// array mentions every requested tag key. The || short-circuits so
			// the genres array is only read when tags were actually requested.
			val matches = tags.isNullOrEmpty() || j.getJSONArray("genres").toString().let { genres ->
				tags.all { genres.contains(it.key, ignoreCase = true) }
			}
			if (matches) {
				manga.add(addManga(href, j))
			}
		}
		return manga
	}

	/**
	 * Builds a [Manga] from one catalogue JSON object.
	 * Ratings come on a 0-10 scale and are normalized to 0..1.
	 */
	private fun addManga(href: String, j: JSONObject): Manga {
		return Manga(
			id = generateUid(href),
			url = href,
			publicUrl = href.toAbsoluteUrl(domain),
			coverUrl = j.getString("thumbnail"),
			title = j.getString("title"),
			description = j.getString("description"),
			// "alt_titles" is a JSON array; strip the array punctuation to get
			// a human-readable, comma-separated string.
			altTitle = j.getJSONArray("alt_titles").toString()
				.replace("[\"", "")
				.replace("\"]", "")
				.replace("\",\"", " , "),
			rating = j.getString("rating").toFloatOrNull()?.div(10f)
				?: RATING_UNKNOWN,
			tags = emptySet(),
			author = j.getString("author"),
			state = when (j.getString("status").lowercase()) {
				"en cours" -> MangaState.ONGOING
				"terminé" -> MangaState.FINISHED
				else -> null
			},
			source = source,
			// "adult" is a stringified flag; anything other than 0 is NSFW.
			isNsfw = j.getString("adult").toInt() != 0,
		)
	}

	override suspend fun getTags(): Set<MangaTag> = emptySet()

	/**
	 * Loads details from the comic API endpoint. Chapters arrive newest-first
	 * and are reversed so that index 0 is the first chapter.
	 */
	override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
		val fullUrl = manga.url.toAbsoluteUrl(domain)
		val json = webClient.httpGet(fullUrl).parseJson().getJSONObject("comic")
		val dateFormat = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.US)
		val chapters = JSONArray(json.getJSONArray("chapters").toJSONList().reversed())

		manga.copy(
			tags = json.getJSONArray("genres").toJSONList().mapNotNullToSet {
				MangaTag(
					key = it.getString("slug"),
					title = it.getString("name"),
					source = source,
				)
			},
			chapters = chapters.mapJSONIndexed { i, j ->
				val url = "/api" + j.getString("url").toRelativeUrl(domain)
				val name = j.getString("full_title")
				val date = j.getStringOrNull("updated_at")
				MangaChapter(
					id = generateUid(url),
					name = name,
					number = i + 1,
					url = url,
					scanlator = null,
					uploadDate = dateFormat.tryParse(date),
					branch = null,
					source = source,
				)
			},
		)
	}

	/**
	 * Extracts page URLs from the chapter API response. The "pages" JSON array
	 * is flattened by string surgery; the first split fragment is the leading
	 * '["' remnant and is dropped.
	 */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val jsonPages = webClient.httpGet(fullUrl).parseJson().getJSONObject("chapter").getJSONArray("pages").toString()
		val pages = jsonPages.replace("[", "").replace("]", "")
			.replace("\\", "").split("\",\"").drop(1)
		return pages.map { url ->
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}
}
|
||||||
@ -0,0 +1,144 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.fr
|
||||||
|
|
||||||
|
import okhttp3.Headers
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||||
|
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||||
|
import org.koitharu.kotatsu.parsers.model.*
|
||||||
|
import org.koitharu.kotatsu.parsers.network.UserAgents
|
||||||
|
import org.koitharu.kotatsu.parsers.util.*
|
||||||
|
import java.text.SimpleDateFormat
|
||||||
|
import java.util.*
|
||||||
|
|
||||||
|
@MangaSourceParser("LIRESCAN", "Lire Scan", "fr")
internal class LireScan(context: MangaLoaderContext) : PagedMangaParser(context, MangaSource.LIRESCAN, 20) {

	override val sortOrders: Set<SortOrder> = EnumSet.of(SortOrder.UPDATED)

	override val configKeyDomain = ConfigKey.Domain("lire-scan.me")

	// The site serves its usable layout to mobile clients; use a mobile UA.
	override val headers: Headers = Headers.Builder()
		.add("User-Agent", UserAgents.CHROME_MOBILE)
		.build()

	/**
	 * Fetches one catalogue page. Search goes through the DLE POST endpoint
	 * (unpaginated here); browsing supports an optional single tag and
	 * "/page/N/" pagination.
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		val tag = tags.oneOrThrowIfMany()
		val doc =
			if (!query.isNullOrEmpty()) { // search only works with 4 or more letters
				if (page > 1) {
					return emptyList()
				}
				val q = query.urlEncoded().replace("%20", "+")
				val post = "do=search&subaction=search&search_start=0&full_search=0&result_from=1&story=$q"
				webClient.httpPost("https://$domain/index.php?do=search", post).parseHtml()
			} else {
				val url = buildString {
					append("https://")
					append(domain)
					if (!tags.isNullOrEmpty()) {
						append("/manga/")
						append(tag?.key.orEmpty())
					}
					if (page > 1) {
						append("/page/")
						append(page)
						append('/')
					}
				}
				webClient.httpGet(url).parseHtml()
			}

		return doc.select("div.sect__content.grid-items div.item-poster").map { div ->
			val href = div.selectFirstOrThrow("a").attrAsAbsoluteUrl("href")
			Manga(
				id = generateUid(href),
				title = div.select(".item-poster__title").text(),
				altTitle = null,
				url = href,
				publicUrl = href.toAbsoluteUrl(domain),
				// Site rating uses a 0-10 scale; normalize to 0..1.
				rating = div.selectFirstOrThrow(".item__rating").ownText().toFloatOrNull()?.div(10f) ?: RATING_UNKNOWN,
				isNsfw = false,
				coverUrl = div.selectFirstOrThrow("img").attrAsAbsoluteUrl("src"),
				tags = setOf(),
				state = null,
				author = null,
				source = source,
			)
		}
	}

	/**
	 * Loads details from the series page. Metadata lives in labelled
	 * "ul.pmovie__list" items whose label prefix is stripped off.
	 */
	override suspend fun getDetails(manga: Manga): Manga {
		val root = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
		val dateFormat = SimpleDateFormat("dd-MM-yyyy", Locale.FRANCE)
		return manga.copy(
			altTitle = root.select("ul.pmovie__list li:contains(Nom Alternatif:)").text()
				.replace("Nom Alternatif:", ""),
			state = when (root.select("ul.pmovie__list li:contains(Status:)").text()) {
				"Status: OnGoing", "Status: En cours" -> MangaState.ONGOING
				"Status: Fini" -> MangaState.FINISHED
				else -> null
			},
			tags = root.select("ul.pmovie__list li:contains(Genre:)").text()
				.replace("Genre:", "").split(" / ").mapNotNullToSet { tag ->
					MangaTag(
						key = tag.lowercase(),
						title = tag,
						source = source,
					)
				},
			author = root.select("ul.pmovie__list li:contains(Artist(s):)").text().replace("Artist(s):", ""),
			description = root.selectFirst("div.pmovie__text")?.html(),
			chapters = root.select("ul li div.chapter")
				.mapChapters(reversed = true) { i, div ->
					val a = div.selectFirstOrThrow("a")
					val href = a.attrAsRelativeUrl("href")
					val name = a.text()
					val dateText = div.select("p").last()?.text()
					MangaChapter(
						id = generateUid(href),
						name = name,
						// FIX: numbering started at 0; number from 1 like the
						// sibling parsers in this project.
						number = i + 1,
						url = href,
						scanlator = null,
						uploadDate = dateFormat.tryParse(dateText),
						branch = null,
						source = source,
					)
				},
		)
	}

	/**
	 * Extracts page URLs from an inline "const manga = …" script, where they
	 * are stored as a JS string array under "chapter1".
	 */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		val pages = doc.selectFirstOrThrow("script:containsData(const manga = )").data()
			.substringAfter("chapter1: [\"").substringBefore("\"]")
			.split("\",\"")
		return pages.map { img ->
			MangaPage(
				id = generateUid(img),
				url = img,
				preview = null,
				source = source,
			)
		}
	}

	/**
	 * Collects genres from the home-page navigation menu. The tag key is the
	 * URL segment after "manga/" (empty for non-genre menu entries).
	 */
	override suspend fun getTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain/").parseHtml()
		return doc.select(".nav-menu li a").mapNotNullToSet { a ->
			// FIX: String.removeSuffix() takes a CharSequence, not a Char —
			// removeSuffix('/') did not compile.
			val key = a.attr("href").removeSuffix("/").substringAfterLast("manga/", "")
			MangaTag(
				key = key,
				title = a.text(),
				source = source,
			)
		}
	}
}
|
||||||
@ -0,0 +1,11 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.heancms.es
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.heancms.HeanCms
|
||||||
|
|
||||||
|
// Spanish HeanCMS-based source; all parsing behaviour is inherited from
// the shared HeanCms template.
@MangaSourceParser("YUGEN_MANGAS_ES", "Yugen Mangas Es", "es", ContentType.HENTAI)
internal class YugenMangasEs(context: MangaLoaderContext) :
	HeanCms(context, MangaSource.YUGEN_MANGAS_ES, "yugenmangas.net")
|
||||||
@ -0,0 +1,10 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.heancms.fr
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.heancms.HeanCms
|
||||||
|
|
||||||
|
// French HeanCMS-based source; all parsing behaviour is inherited from
// the shared HeanCms template.
@MangaSourceParser("PERF_SCAN", "Perf Scan", "fr")
internal class PerfScan(context: MangaLoaderContext) :
	HeanCms(context, MangaSource.PERF_SCAN, "perf-scan.fr")
|
||||||
@ -0,0 +1,10 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.heancms.pt
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.heancms.HeanCms
|
||||||
|
|
||||||
|
// Portuguese HeanCMS-based source; all parsing behaviour is inherited from
// the shared HeanCms template.
@MangaSourceParser("REAPERSCANSPT", "ReaperScans Pt", "pt")
internal class ReaperScansPt(context: MangaLoaderContext) :
	HeanCms(context, MangaSource.REAPERSCANSPT, "reaperscans.net")
|
||||||
@ -0,0 +1,161 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.heancmsalt
|
||||||
|
|
||||||
|
import kotlinx.coroutines.coroutineScope
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||||
|
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||||
|
import org.koitharu.kotatsu.parsers.model.*
|
||||||
|
import org.koitharu.kotatsu.parsers.util.*
|
||||||
|
import java.text.DateFormat
|
||||||
|
import java.text.SimpleDateFormat
|
||||||
|
import java.util.*
|
||||||
|
|
||||||
|
// Template similar to HeanCMS but with a different way of working: this
// variant scrapes the rendered HTML pages directly. Concrete sources can
// tune behaviour by overriding the CSS selectors and paths declared as
// protected open properties below.
internal abstract class HeanCmsAlt(
	context: MangaLoaderContext,
	source: MangaSource,
	domain: String,
	pageSize: Int = 18,
) : PagedMangaParser(context, source, pageSize) {

	override val configKeyDomain = ConfigKey.Domain(domain)

	override val sortOrders: Set<SortOrder> = EnumSet.of(SortOrder.UPDATED)

	// Catalogue path and chapter-date format; overridable per site.
	protected open val listUrl = "/comics"
	protected open val datePattern = "MMMM d, yyyy"

	init {
		// Both paginators are 1-based on these sites.
		paginator.firstPage = 1
		searchPaginator.firstPage = 1
	}

	// Selectors for the catalogue grid and each entry's title element.
	protected open val selectManga = "div.grid.grid-cols-2 div:not([class]):contains(M)"
	protected open val selectMangaTitle = "h5"

	/**
	 * Fetches one catalogue page. These sites support neither text search nor
	 * tag filtering, so any query yields an empty result.
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		// No search or tag
		if (!query.isNullOrEmpty()) {
			return emptyList()
		}
		val url = buildString {
			append("https://")
			append(domain)
			append(listUrl)
			if (page > 1) {
				append("?page=")
				append(page)
			}
		}
		val doc = webClient.httpGet(url).parseHtml()

		return doc.select(selectManga).map { div ->
			val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
			Manga(
				id = generateUid(href),
				url = href,
				publicUrl = href.toAbsoluteUrl(div.host ?: domain),
				coverUrl = div.selectFirstOrThrow("img").src().orEmpty(),
				title = div.selectFirstOrThrow(selectMangaTitle).text().orEmpty(),
				altTitle = null,
				rating = RATING_UNKNOWN,
				tags = emptySet(),
				author = null,
				state = null,
				source = source,
				isNsfw = isNsfwSource,
			)
		}
	}

	// Genres are not exposed by these sites.
	override suspend fun getTags(): Set<MangaTag> = emptySet()

	// Selectors for the details page; overridable per site.
	protected open val selectDesc = "div.description-container"
	protected open val selectAlt = "div.series-alternative-names"
	protected open val selectChapter = "ul.MuiList-root a"
	protected open val selectChapterTitle = "div.MuiListItemText-multiline span"
	protected open val selectChapterDate = "div.MuiListItemText-multiline p"

	/**
	 * Loads description, alternative titles and the chapter list from the
	 * series page. Chapters are listed newest-first and reversed here.
	 */
	override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
		val fullUrl = manga.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
		manga.copy(
			altTitle = doc.selectFirst(selectAlt)?.text().orEmpty(),
			description = doc.selectFirstOrThrow(selectDesc).html(),
			chapters = doc.select(selectChapter)
				.mapChapters(reversed = true) { i, a ->
					val dateText = a.selectFirstOrThrow(selectChapterDate).text()
					val url = a.attrAsRelativeUrl("href").toAbsoluteUrl(domain)
					MangaChapter(
						id = generateUid(url),
						name = a.selectFirstOrThrow(selectChapterTitle).text(),
						number = i + 1,
						url = url,
						scanlator = null,
						uploadDate = parseChapterDate(
							dateFormat,
							dateText,
						),
						branch = null,
						source = source,
					)
				},
		)
	}

	// Selector for page images within a chapter; overridable per site.
	protected open val selectPage = "p.flex-col.items-center img"

	/** Extracts the page image URLs from a chapter page. */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		return doc.select(selectPage).map { img ->
			val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}

	/**
	 * Parses a chapter date: Spanish relative dates ("hace …" = "… ago")
	 * are handled separately; everything else goes through [dateFormat].
	 * Returns 0 when the date is null or unparseable.
	 */
	private fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
		val d = date?.lowercase() ?: return 0
		return when {
			d.startsWith("hace ") -> parseRelativeDate(date)
			else -> dateFormat.tryParse(date)
		}
	}

	// Parses dates in this form:
	// 21 hours ago (Spanish wording, e.g. "hace 21 horas")
	private fun parseRelativeDate(date: String): Long {
		val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
		val cal = Calendar.getInstance()

		// Subtract the matched amount from "now" based on the Spanish time unit.
		return when {
			WordSet("segundo").anyWordIn(date) -> cal.apply { add(Calendar.SECOND, -number) }.timeInMillis
			WordSet("minutos", "minuto").anyWordIn(date) -> cal.apply { add(Calendar.MINUTE, -number) }.timeInMillis
			WordSet("hora", "horas").anyWordIn(date) -> cal.apply { add(Calendar.HOUR, -number) }.timeInMillis
			WordSet("días", "día").anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis
			WordSet("semana", "semanas").anyWordIn(date) -> cal.apply {
				add(
					Calendar.WEEK_OF_YEAR,
					-number,
				)
			}.timeInMillis

			WordSet("mes").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
			WordSet("año").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
			else -> 0
		}
	}
}
|
||||||
@ -0,0 +1,10 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.heancmsalt.es
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.heancmsalt.HeanCmsAlt
|
||||||
|
|
||||||
|
// HeanCmsAlt-based Spanish source. NOTE: the parser ID stays "LEGIONSCANS"
// (the site's former name) for backward compatibility even though the
// display name is now "CerberuSeries".
@MangaSourceParser("LEGIONSCANS", "CerberuSeries", "es")
internal class CerberuSeries(context: MangaLoaderContext) :
	HeanCmsAlt(context, MangaSource.LEGIONSCANS, "cerberuseries.xyz")
|
||||||
@ -0,0 +1,24 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.heancmsalt.es
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.heancmsalt.HeanCmsAlt
|
||||||
|
|
||||||
|
// HeanCmsAlt-based Spanish source using a custom theme, so every selector
// from the template is overridden; 15 entries per catalogue page.
@MangaSourceParser("MANGAESP", "MangaEsp", "es")
internal class MangaEsp(context: MangaLoaderContext) :
	HeanCmsAlt(context, MangaSource.MANGAESP, "mangaesp.co", 15) {

	override val listUrl = "/comic"

	// Catalogue grid selectors.
	override val selectManga = "div.contenedor div.grid-5 .p-relative:not(.portada-contenedor)"
	override val selectMangaTitle = "div.titulo-contenedor"

	// Details-page selectors.
	override val selectDesc = "div.project-sinopsis-contenido"
	// NOTE(review): "Altenativo" looks misspelled, but it must match the
	// site's actual markup — verify against the live page before "fixing".
	override val selectAlt = "div.project-info-opcion:contains(Altenativo) div.project-info-contenido"
	override val selectChapter = "div.grid-capitulos div a"
	override val selectChapterTitle = ".capitulo-info-titulo"
	override val selectChapterDate = ".capitulo-info-fecha"

	// Reader-page image selector.
	override val selectPage = ".grid-center img"
}
|
||||||
@ -0,0 +1,10 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.madara.ar
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||||
|
|
||||||
|
// Madara-based Arabic source; catalogue uses 10 entries per page.
@MangaSourceParser("MANGALIKE_ORG", "MangaLike Org", "ar")
internal class MangaLikeOrg(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.MANGALIKE_ORG, "mangalike.org", pageSize = 10)
|
||||||
@ -0,0 +1,12 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.madara.en
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||||
|
|
||||||
|
// Madara-based English source.
@MangaSourceParser("CREEPYSCANS", "CreepyScans", "en")
internal class CreepyScans(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.CREEPYSCANS, "creepyscans.com") {
	// Empty stylepage disables the reader-style URL suffix from the Madara
	// template — presumably the "?style=…" parameter; confirm in MadaraParser.
	override val stylepage = ""
}
|
||||||
@ -0,0 +1,13 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.madara.tr
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||||
|
|
||||||
|
// Madara-based Turkish source; 16 entries per catalogue page.
@MangaSourceParser("YAOITR", "Yaoi Tr", "tr")
internal class YaoiTr(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.YAOITR, "yaoitr.com", 16) {

	// Chapter dates are rendered like "5 Ocak 2023" rather than the
	// template's default pattern.
	override val datePattern = "d MMMM yyyy"
}
|
||||||
@ -0,0 +1,10 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.mangareader.ar
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||||
|
|
||||||
|
// MangaReader-based Arabic source.
@MangaSourceParser("OZULSHOJO", "OzulShojo", "ar")
internal class OzulShojo(context: MangaLoaderContext) :
	MangaReaderParser(context, MangaSource.OZULSHOJO, "ozulshojo.com", pageSize = 20, searchPageSize = 10)
|
||||||
@ -0,0 +1,12 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.mangareader.en
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||||
|
|
||||||
|
// MangaReader-based English source.
@MangaSourceParser("OZULSCANSEN", "OzulScans En", "en")
internal class OzulScansEn(context: MangaLoaderContext) :
	MangaReaderParser(context, MangaSource.OZULSCANSEN, "ozulscansen.com", pageSize = 30, searchPageSize = 10) {
	// This site hosts its catalogue under /comics instead of the template default.
	override val listUrl = "/comics"
}
|
||||||
@ -0,0 +1,14 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.mangareader.id
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||||
|
import java.util.*
|
||||||
|
|
||||||
|
// MangaReader-based Indonesian source.
@MangaSourceParser("MANHWALAND", "Manhwa Land", "id", ContentType.HENTAI)
internal class ManhwaLand(context: MangaLoaderContext) :
	MangaReaderParser(context, MangaSource.MANHWALAND, "manhwaland.lat", pageSize = 20, searchPageSize = 10) {
	// English locale override — presumably because the site renders dates in
	// English despite being an Indonesian source; verify against live pages.
	override val sourceLocale: Locale = Locale.ENGLISH
}
|
||||||
@ -0,0 +1,14 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.mangareader.pt
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||||
|
|
||||||
|
// MangaReader-based Portuguese source (adult subdomain of Sssscanlator).
@MangaSourceParser("HENTAISSSSSCANLATOR", "Sssscanlator Hentai", "pt", type = ContentType.HENTAI)
internal class HentaiSsssscanlator(context: MangaLoaderContext) :
	MangaReaderParser(context, MangaSource.HENTAISSSSSCANLATOR, "hentais.sssscanlator.com", pageSize = 20, searchPageSize = 10) {

	// Chapter dates are rendered like "jan 5, 2023".
	override val datePattern = "MMM d, yyyy"
}
|
||||||
@ -0,0 +1,11 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.mangareader.th
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||||
|
|
||||||
|
// MangaReader-based Thai source.
@MangaSourceParser("MANGA168", "Manga 168", "th", ContentType.HENTAI)
internal class Manga168(context: MangaLoaderContext) :
	MangaReaderParser(context, MangaSource.MANGA168, "manga168.com", pageSize = 40, searchPageSize = 30)
|
||||||
@ -0,0 +1,11 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.mangareader.tr
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||||
|
|
||||||
|
// MangaReader-based Turkish source. Note the "www." prefix is part of the
// configured domain.
@MangaSourceParser("TAROTSCANS", "Tarot Scans", "tr")
internal class TarotScans(context: MangaLoaderContext) :
	MangaReaderParser(context, MangaSource.TAROTSCANS, "www.tarotscans.com", pageSize = 20, searchPageSize = 10)
|
||||||
|
|
||||||
@ -0,0 +1,16 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.mmrcms.fr
|
||||||
|
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||||
|
import org.koitharu.kotatsu.parsers.site.mmrcms.MmrcmsParser
|
||||||
|
import java.util.*
|
||||||
|
|
||||||
|
// MMRCMS-based French source.
@MangaSourceParser("JPSCANVF", "JpScanVf", "fr")
internal class JpScanVf(context: MangaLoaderContext) :
	MmrcmsParser(context, MangaSource.JPSCANVF, "jpscan-vf.net") {

	// The search doesn't work on the source (site-side limitation).

	// English locale override — presumably the site renders dates in English;
	// verify against live pages.
	override val sourceLocale: Locale = Locale.ENGLISH
}
|
||||||
Loading…
Reference in New Issue