commit
6058221fec
@ -0,0 +1,200 @@
|
||||
package org.koitharu.kotatsu.parsers.site.animebootstrap
|
||||
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import org.json.JSONArray
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.util.*
|
||||
|
||||
// see https://themewagon.com/themes/free-bootstrap-4-html5-gaming-anime-website-template-anime/
|
||||
|
||||
/**
 * Base parser for manga sites built on the free Bootstrap 4 "Anime" template
 * (see the link above). Site-specific subclasses adjust the URL paths and CSS
 * selectors through the protected `open` properties below.
 */
internal abstract class AnimeBootstrapParser(
	context: MangaLoaderContext,
	source: MangaSource,
	domain: String,
	pageSize: Int = 24,
) : PagedMangaParser(context, source, pageSize) {

	override val configKeyDomain = ConfigKey.Domain(domain)

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.UPDATED,
		SortOrder.POPULARITY,
		SortOrder.ALPHABETICAL,
		SortOrder.NEWEST,
	)

	// Catalogue path relative to the domain; includes the leading slash.
	protected open val listUrl = "/manga"

	// Date pattern used by chapter listings, e.g. "05 Jan. 2023".
	protected open val datePattern = "dd MMM. yyyy"

	init {
		// This template numbers pages starting from 1, not 0.
		paginator.firstPage = 1
		searchPaginator.firstPage = 1
	}

	/**
	 * Loads one catalogue page, optionally filtered by a text [query] or genre
	 * [tags], ordered by [sortOrder].
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		val url = buildString {
			append("https://")
			append(domain)
			append(listUrl)
			append("?page=")
			append(page.toString())
			append("&type=all")

			if (!query.isNullOrEmpty()) {
				append("&search=")
				append(query.urlEncoded())
			}

			if (!tags.isNullOrEmpty()) {
				// NOTE(review): tag keys are concatenated with no separator —
				// presumably the site only honours a single genre; confirm
				// multi-tag behaviour against the server.
				append("&categorie=")
				for (tag in tags) {
					append(tag.key)
				}
			}

			append("&sort=")
			when (sortOrder) {
				SortOrder.POPULARITY -> append("view")
				SortOrder.UPDATED -> append("updated")
				SortOrder.ALPHABETICAL -> append("default")
				SortOrder.NEWEST -> append("published")
				else -> append("updated")
			}
		}
		val doc = webClient.httpGet(url).parseHtml()

		return doc.select("div.col-6 div.product__item").map { div ->
			val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
			Manga(
				id = generateUid(href),
				url = href,
				publicUrl = href.toAbsoluteUrl(div.host ?: domain),
				// Covers are lazy-loaded: the URL lives in a data attribute.
				coverUrl = div.selectFirstOrThrow("div.product__item__pic").attr("data-setbg").orEmpty(),
				title = div.selectFirstOrThrow("div.product__item__text").text().orEmpty(),
				altTitle = null,
				rating = RATING_UNKNOWN,
				tags = emptySet(),
				author = null,
				state = null,
				source = source,
				isNsfw = isNsfwSource,
			)
		}
	}

	/** Scrapes the genre filter dropdown of the catalogue page into tags. */
	override suspend fun getTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain$listUrl").parseHtml()
		return doc.select("div.product__page__filter div:contains(Genre:) option ").mapNotNullToSet { option ->
			val key = option.attr("value") ?: return@mapNotNullToSet null
			val name = option.text()
			MangaTag(
				key = key,
				title = name,
				source = source,
			)
		}
	}

	// Details-page selectors; overridable for sites that localise the labels.
	protected open val selectDesc = "div.anime__details__text p"
	protected open val selectState = "div.anime__details__widget li:contains(Ongoing)"
	protected open val selectTag = "div.anime__details__widget li:contains(Categorie) a"

	/**
	 * Fetches description, state, tags and chapters for [manga].
	 * Chapters are parsed concurrently from the same document.
	 */
	override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
		val fullUrl = manga.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		val chaptersDeferred = async { getChapters(manga, doc) }

		val desc = doc.selectFirstOrThrow(selectDesc).html()

		// Presence of the "Ongoing" row decides the state; its absence is
		// treated as finished.
		val state = if (doc.select(selectState).isNullOrEmpty()) {
			MangaState.FINISHED
		} else {
			MangaState.ONGOING
		}

		manga.copy(
			tags = doc.body().select(selectTag).mapNotNullToSet { a ->
				MangaTag(
					// The tag key is the query-string value of the genre link.
					key = a.attr("href").substringAfterLast('='),
					title = a.text().toTitleCase().replace(",", ""),
					source = source,
				)
			},
			description = desc,
			state = state,
			chapters = chaptersDeferred.await(),
		)
	}

	protected open val selectChapter = "div.anime__details__episodes a"

	/** Parses the chapter list from an already-fetched details [doc], oldest first. */
	protected open suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		return doc.body().select(selectChapter).mapChapters(reversed = true) { i, a ->
			val href = a.attr("href")
			MangaChapter(
				id = generateUid(href),
				name = a.text(),
				number = i + 1,
				url = href,
				// The template exposes no chapter date in this markup.
				uploadDate = 0,
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}

	protected open val selectPage = "div.read-img img"

	/**
	 * Loads the page list of [chapter]. Handles both reader variants used by
	 * this template: plain <img> tags and a script-embedded JSON page list.
	 */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		if (doc.select("script:containsData(page_image)").isNullOrEmpty()) {
			// Plain reader: the real image URL is hidden inside each <img>
			// element's onerror handler, wrapped in backticks.
			return doc.select(selectPage).map { img ->
				val url = img.attr("onerror").replace("this.onerror=null;this.src=`", "").replace("`;", "")
				MangaPage(
					id = generateUid(url),
					url = url,
					preview = null,
					source = source,
				)
			}
		} else {
			// Script reader: pages are embedded as a JSON array assigned to
			// `var pages = ...;` inside an inline script.
			val script = doc.selectFirstOrThrow("script:containsData(page_image)")
			val images = JSONArray(script.data().substringAfterLast("var pages = ").substringBefore(';'))

			val pages = ArrayList<MangaPage>(images.length())
			for (i in 0 until images.length()) {

				val pageTake = images.getJSONObject(i)
				pages.add(
					MangaPage(
						id = generateUid(pageTake.getString("page_image")),
						url = pageTake.getString("page_image"),
						preview = null,
						source = source,
					),
				)
			}

			return pages
		}

	}
}
|
||||
@ -0,0 +1,164 @@
|
||||
package org.koitharu.kotatsu.parsers.site.animebootstrap.fr
|
||||
|
||||
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.Manga
|
||||
import org.koitharu.kotatsu.parsers.model.MangaChapter
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.model.MangaState
|
||||
import org.koitharu.kotatsu.parsers.model.MangaTag
|
||||
import org.koitharu.kotatsu.parsers.model.RATING_UNKNOWN
|
||||
import org.koitharu.kotatsu.parsers.model.SortOrder
|
||||
import org.koitharu.kotatsu.parsers.site.animebootstrap.AnimeBootstrapParser
|
||||
import org.koitharu.kotatsu.parsers.util.attrAsRelativeUrl
|
||||
import org.koitharu.kotatsu.parsers.util.domain
|
||||
import org.koitharu.kotatsu.parsers.util.generateUid
|
||||
import org.koitharu.kotatsu.parsers.util.host
|
||||
import org.koitharu.kotatsu.parsers.util.mapChapters
|
||||
import org.koitharu.kotatsu.parsers.util.mapNotNullToSet
|
||||
import org.koitharu.kotatsu.parsers.util.parseHtml
|
||||
import org.koitharu.kotatsu.parsers.util.removeSuffix
|
||||
import org.koitharu.kotatsu.parsers.util.selectFirstOrThrow
|
||||
import org.koitharu.kotatsu.parsers.util.toAbsoluteUrl
|
||||
import org.koitharu.kotatsu.parsers.util.toTitleCase
|
||||
import org.koitharu.kotatsu.parsers.util.tryParse
|
||||
import org.koitharu.kotatsu.parsers.util.urlEncoded
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.EnumSet
|
||||
import java.util.Locale
|
||||
|
||||
|
||||
/**
 * PapScan (papscan.com) — French AnimeBootstrap site that uses a custom
 * `/filterList` search endpoint, French details-page labels and a different
 * chapter-list markup carrying upload dates.
 */
@MangaSourceParser("PAPSCAN", "PapScan", "fr")
internal class PapScan(context: MangaLoaderContext) :
	AnimeBootstrapParser(context, MangaSource.PAPSCAN, "papscan.com") {

	// NOTE(review): locale is ENGLISH even though the site is French —
	// presumably the chapter dates are formatted with English month
	// abbreviations; confirm against live data.
	override val sourceLocale: Locale = Locale.ENGLISH

	override val listUrl = "/liste-manga"

	// French labels on the details page.
	override val selectState = "div.anime__details__widget li:contains(En cours)"
	override val selectTag = "div.anime__details__widget li:contains(Genre) a"

	override val selectChapter = "ul.chapters li"

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.POPULARITY,
		SortOrder.ALPHABETICAL,
	)

	/**
	 * Catalogue search goes through `/filterList` with `alpha` (text search),
	 * `cat` (genre) and `sortBy` parameters instead of the template defaults.
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		val url = buildString {
			append("https://")
			append(domain)
			append("/filterList")
			append("?page=")
			append(page.toString())

			if (!query.isNullOrEmpty()) {
				append("&alpha=")
				append(query.urlEncoded())
			}

			if (!tags.isNullOrEmpty()) {
				// NOTE(review): tag keys concatenated with no separator —
				// likely only a single genre is supported; verify.
				append("&cat=")
				for (tag in tags) {
					append(tag.key)
				}
			}
			append("&sortBy=")
			when (sortOrder) {
				SortOrder.POPULARITY -> append("views")
				SortOrder.ALPHABETICAL -> append("name")
				else -> append("updated")
			}
		}
		val doc = webClient.httpGet(url).parseHtml()

		return doc.select("div.product__item").map { div ->
			val href = div.selectFirstOrThrow("h5 a").attrAsRelativeUrl("href")
			Manga(
				id = generateUid(href),
				url = href,
				publicUrl = href.toAbsoluteUrl(div.host ?: domain),
				// Lazy-loaded cover: URL is carried in a data attribute.
				coverUrl = div.selectFirstOrThrow("div.product__item__pic").attr("data-setbg").orEmpty(),
				title = div.selectFirstOrThrow("div.product__item__text h5").text().orEmpty(),
				altTitle = null,
				rating = RATING_UNKNOWN,
				tags = emptySet(),
				author = null,
				state = null,
				source = source,
				isNsfw = isNsfwSource,
			)
		}
	}

	/** Tags come from the category links on the catalogue page. */
	override suspend fun getTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain$listUrl").parseHtml()
		return doc.select("a.category ").mapNotNullToSet { a ->
			// Tag key is the query-string value of the category link.
			val key = a.attr("href").substringAfterLast('=')
			val name = a.text()
			MangaTag(
				key = key,
				title = name,
				source = source,
			)
		}
	}

	/**
	 * Same flow as the base class, but tag keys are derived from the last path
	 * segment of the genre link rather than a query-string value.
	 */
	override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
		val fullUrl = manga.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		val chaptersDeferred = async { getChapters(manga, doc) }

		val desc = doc.selectFirstOrThrow(selectDesc).html()

		// "En cours" row present => ongoing, otherwise finished.
		val state = if (doc.select(selectState).isNullOrEmpty()) {
			MangaState.FINISHED
		} else {
			MangaState.ONGOING
		}

		manga.copy(
			tags = doc.body().select(selectTag).mapNotNullToSet { a ->
				MangaTag(
					// Drop a trailing '/' then take the last path segment.
					key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
					title = a.text().toTitleCase(),
					source = source,
				)
			},
			description = desc,
			state = state,
			chapters = chaptersDeferred.await(),
		)
	}

	/** Chapters live in `ul.chapters li` rows which include an upload date. */
	override suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
		return doc.body().select(selectChapter).mapChapters(reversed = true) { i, li ->
			val href = li.selectFirstOrThrow("a").attr("href")
			val dateText = li.selectFirst("span.date-chapter-title-rtl")?.text()
			MangaChapter(
				id = generateUid(href),
				name = li.selectFirstOrThrow("span em").text(),
				number = i + 1,
				url = href,
				// tryParse falls back gracefully when the date is absent/unparsable.
				uploadDate = dateFormat.tryParse(dateText),
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}

}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.animebootstrap.id
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.animebootstrap.AnimeBootstrapParser
|
||||
|
||||
|
||||
/**
 * KomikzoId (komikzoid.xyz) — Indonesian site using the stock
 * [AnimeBootstrapParser] behaviour with no overrides.
 */
@MangaSourceParser("KOMIKZOID", "KomikzoId", "id")
internal class KomikzoId(context: MangaLoaderContext) :
	AnimeBootstrapParser(context, MangaSource.KOMIKZOID, "komikzoid.xyz")
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.animebootstrap.id
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.animebootstrap.AnimeBootstrapParser
|
||||
|
||||
|
||||
/**
 * Neu Manga (neumanga.xyz) — Indonesian site using the stock
 * [AnimeBootstrapParser] behaviour with no overrides.
 */
@MangaSourceParser("NEUMANGA", "Neu Manga", "id")
internal class NeuManga(context: MangaLoaderContext) :
	AnimeBootstrapParser(context, MangaSource.NEUMANGA, "neumanga.xyz")
|
||||
@ -0,0 +1,11 @@
|
||||
package org.koitharu.kotatsu.parsers.site.animebootstrap.id
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.animebootstrap.AnimeBootstrapParser
|
||||
|
||||
/**
 * Sekte Komik (sektekomik.xyz) — Indonesian site using the stock
 * [AnimeBootstrapParser] behaviour with no overrides.
 */
@MangaSourceParser("SEKTEKOMIK", "Sekte Komik", "id")
internal class SekteKomik(context: MangaLoaderContext) :
	AnimeBootstrapParser(context, MangaSource.SEKTEKOMIK, "sektekomik.xyz")
|
||||
@ -0,0 +1,288 @@
|
||||
package org.koitharu.kotatsu.parsers.site.fmreader
|
||||
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
/**
 * Base parser for sites running the FMReader CMS. Subclasses customise URL
 * paths, CSS selectors and date formats through the protected `open`
 * properties below.
 */
internal abstract class FmreaderParser(
	context: MangaLoaderContext,
	source: MangaSource,
	domain: String,
	pageSize: Int = 20,
) : PagedMangaParser(context, source, pageSize) {

	override val configKeyDomain = ConfigKey.Domain(domain)

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.UPDATED,
		SortOrder.POPULARITY,
		SortOrder.ALPHABETICAL,
	)

	// Catalogue path relative to the domain; includes the leading slash.
	protected open val listeurl = "/manga-list.html"

	// Date pattern for absolute chapter dates, e.g. "January 5, 2023".
	protected open val datePattern = "MMMM d, yyyy"

	init {
		// FMReader numbers pages starting from 1, not 0.
		paginator.firstPage = 1
		searchPaginator.firstPage = 1
	}

	// Status labels the CMS uses for an ongoing series.
	@JvmField
	protected val ongoing: Set<String> = setOf(
		"On going",
		"Incomplete",
	)

	// Status labels the CMS uses for a completed series.
	@JvmField
	protected val finished: Set<String> = setOf(
		"Completed",
	)

	/**
	 * Loads one catalogue page. Text search ([query]) and genre filtering
	 * ([tags]) are mutually exclusive here: the `when` applies the first
	 * matching filter only.
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		val url = buildString {
			append("https://")
			append(domain)
			append(listeurl)
			append("?page=")
			append(page.toString())
			when {
				!query.isNullOrEmpty() -> {

					append("&name=")
					append(query.urlEncoded())
				}

				!tags.isNullOrEmpty() -> {
					// NOTE(review): tag keys concatenated with no separator —
					// likely only a single genre is supported; verify.
					append("&genre=")
					for (tag in tags) {
						append(tag.key)
					}
				}
			}

			append("&sort=")
			when (sortOrder) {
				SortOrder.POPULARITY -> append("views")
				SortOrder.UPDATED -> append("last_update")
				SortOrder.ALPHABETICAL -> append("name")
				else -> append("last_update")
			}
		}
		val doc = webClient.httpGet(url).parseHtml()

		return doc.select("div.thumb-item-flow").map { div ->

			val href = div.selectFirstOrThrow("div.series-title a").attrAsRelativeUrl("href")
			Manga(
				id = generateUid(href),
				url = href,
				publicUrl = href.toAbsoluteUrl(div.host ?: domain),
				// Cover is set via an inline CSS background: url('...').
				coverUrl = div.selectFirstOrThrow("div.img-in-ratio").attr("style").substringAfter("('")
					.substringBeforeLast("')"),
				title = div.selectFirstOrThrow("div.series-title").text().orEmpty(),
				altTitle = null,
				rating = RATING_UNKNOWN,
				tags = emptySet(),
				author = null,
				state = null,
				source = source,
				isNsfw = isNsfwSource,
			)
		}
	}

	/** Scrapes the genre filter list of the catalogue page into tags. */
	override suspend fun getTags(): Set<MangaTag> {
		// Fix: listeurl already starts with '/' (getListPage above appends it
		// without a separator); the previous "https://$domain/$listeurl"
		// produced a double slash in the URL.
		val doc = webClient.httpGet("https://$domain$listeurl").parseHtml()
		return doc.select("ul.filter-type li").mapNotNullToSet { li ->
			val a = li.selectFirst("a") ?: return@mapNotNullToSet null
			// Genre links look like ".../manga-list-genre-<key>.html".
			val href = a.attr("href").substringAfter("manga-list-genre-").substringBeforeLast(".html")
			MangaTag(
				key = href,
				title = a.text(),
				source = source,
			)
		}
	}

	// Details-page selectors; overridable for sites that localise the labels.
	protected open val selectDesc = "div.summary-content"
	protected open val selectState = "ul.manga-info li:contains(Status) a"
	protected open val selectAlt = "ul.manga-info li:contains(Other names)"
	protected open val selectAut = "ul.manga-info li:contains(Author(s)) a"
	protected open val selectTag = "ul.manga-info li:contains(Genre(s)) a"

	/**
	 * Fetches description, state, alternative title, author, tags and
	 * chapters for [manga]. Chapters are parsed concurrently.
	 */
	override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
		val fullUrl = manga.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		val chaptersDeferred = async { getChapters(manga, doc) }

		val desc = doc.selectFirstOrThrow(selectDesc).html()

		val stateDiv = doc.selectFirst(selectState)

		// Map the status label through the ongoing/finished word sets;
		// unknown labels yield null (state unknown).
		val state = stateDiv?.let {
			when (it.text()) {
				in ongoing -> MangaState.ONGOING
				in finished -> MangaState.FINISHED
				else -> null
			}
		}

		// The "Other names" row includes its own label; strip it out.
		val alt = doc.body().selectFirst(selectAlt)?.text()?.replace("Other names", "")
		val auth = doc.body().selectFirst(selectAut)?.text()
		manga.copy(
			tags = doc.body().select(selectTag).mapNotNullToSet { a ->
				MangaTag(
					key = a.attr("href").substringAfter("manga-list-genre-").substringBeforeLast(".html"),
					title = a.text().toTitleCase(),
					source = source,
				)
			},
			description = desc,
			altTitle = alt,
			author = auth,
			state = state,
			chapters = chaptersDeferred.await(),
		)
	}

	protected open val selectDate = "div.chapter-time"
	protected open val selectChapter = "ul.list-chapters a"

	/** Parses the chapter list from an already-fetched details [doc], oldest first. */
	protected open suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
		return doc.body().select(selectChapter).mapChapters(reversed = true) { i, a ->
			val href = a.attrAsRelativeUrl("href")
			val dateText = a.selectFirst(selectDate)?.text()
			MangaChapter(
				id = generateUid(href),
				name = a.selectFirstOrThrow("div.chapter-name").text(),
				number = i + 1,
				url = href,
				uploadDate = parseChapterDate(
					dateFormat,
					dateText,
				),
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}

	protected open val selectPage = "div.chapter-content img"

	/** Loads the page images of [chapter] from the reader page. */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		return doc.select(selectPage).map { img ->
			val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")

			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}

	/**
	 * Parses a chapter date string that may be relative ("21 hours ago"),
	 * "yesterday"/"today", ordinal ("5th December 2019") or absolute.
	 * Returns 0 when [date] is null or cannot be parsed.
	 */
	protected fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
		// Clean date (e.g. 5th December 2019 to 5 December 2019) before parsing it
		val d = date?.lowercase() ?: return 0
		return when {
			d.endsWith(" ago") ||
				// short Hours
				d.endsWith(" h") ||
				// short Day
				d.endsWith(" d") -> parseRelativeDate(date)

			// Handle 'yesterday' and 'today', using midnight.
			// Fix: this branch previously tested startsWith("year"), which
			// never matches "yesterday", leaving the branch dead.
			d.startsWith("yesterday") -> Calendar.getInstance().apply {
				add(Calendar.DAY_OF_MONTH, -1) // yesterday
				set(Calendar.HOUR_OF_DAY, 0)
				set(Calendar.MINUTE, 0)
				set(Calendar.SECOND, 0)
				set(Calendar.MILLISECOND, 0)
			}.timeInMillis

			d.startsWith("today") -> Calendar.getInstance().apply {
				set(Calendar.HOUR_OF_DAY, 0)
				set(Calendar.MINUTE, 0)
				set(Calendar.SECOND, 0)
				set(Calendar.MILLISECOND, 0)
			}.timeInMillis

			// Strip English ordinal suffixes (1st, 2nd, 3rd, 4th...) so the
			// plain dateFormat can parse the remainder.
			date.contains(Regex("""\d(st|nd|rd|th)""")) -> date.split(" ").map {
				if (it.contains(Regex("""\d\D\D"""))) {
					it.replace(Regex("""\D"""), "")
				} else {
					it
				}
			}.let { dateFormat.tryParse(it.joinToString(" ")) }

			else -> dateFormat.tryParse(date)
		}
	}

	// Parses dates in this form:
	// 21 hours ago
	private fun parseRelativeDate(date: String): Long {
		val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
		val cal = Calendar.getInstance()

		return when {
			WordSet(
				"day",
				"days",
			).anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis

			WordSet("hour", "hours", "h").anyWordIn(date) -> cal.apply {
				add(
					Calendar.HOUR,
					-number,
				)
			}.timeInMillis

			WordSet(
				"min",
				"minute",
				"minutes",
			).anyWordIn(date) -> cal.apply {
				add(
					Calendar.MINUTE,
					-number,
				)
			}.timeInMillis

			WordSet("second").anyWordIn(date) -> cal.apply {
				add(
					Calendar.SECOND,
					-number,
				)
			}.timeInMillis

			WordSet("month", "months").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
			WordSet("year").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
			else -> 0
		}
	}

}
|
||||
@ -0,0 +1,95 @@
|
||||
package org.koitharu.kotatsu.parsers.site.fmreader.en
|
||||
|
||||
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.Manga
|
||||
import org.koitharu.kotatsu.parsers.model.MangaChapter
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.model.MangaState
|
||||
import org.koitharu.kotatsu.parsers.model.MangaTag
|
||||
import org.koitharu.kotatsu.parsers.site.fmreader.FmreaderParser
|
||||
import org.koitharu.kotatsu.parsers.util.attrAsRelativeUrl
|
||||
import org.koitharu.kotatsu.parsers.util.domain
|
||||
import org.koitharu.kotatsu.parsers.util.generateUid
|
||||
import org.koitharu.kotatsu.parsers.util.mapChapters
|
||||
import org.koitharu.kotatsu.parsers.util.mapNotNullToSet
|
||||
import org.koitharu.kotatsu.parsers.util.parseHtml
|
||||
import org.koitharu.kotatsu.parsers.util.selectFirstOrThrow
|
||||
import org.koitharu.kotatsu.parsers.util.toAbsoluteUrl
|
||||
import org.koitharu.kotatsu.parsers.util.toTitleCase
|
||||
import java.text.SimpleDateFormat
|
||||
|
||||
/**
 * Manhwa18 Com (manhwa18.com) — English adult FMReader site with a `/tim-kiem`
 * catalogue, relabelled details page ("Other name" instead of "Other names")
 * and numeric dd/MM/yyyy chapter dates.
 */
@MangaSourceParser("MANHWA18COM", "Manhwa18 Com", "en", ContentType.HENTAI)
internal class Manhwa18Com(context: MangaLoaderContext) :
	FmreaderParser(context, MangaSource.MANHWA18COM, "manhwa18.com") {

	override val listeurl = "/tim-kiem"

	override val selectState = "div.info-item:contains(Status) span.info-value "
	override val selectAlt = "div.info-item:contains(Other name) span.info-value "
	override val selectTag = "div.info-item:contains(Genre) span.info-value a"
	override val datePattern = "dd/MM/yyyy"
	override val selectPage = "div#chapter-content img"

	/**
	 * Same flow as the base class; overridden only because this site labels the
	 * alternative title "Other name" (singular) instead of "Other names".
	 */
	override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
		val fullUrl = manga.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		val chaptersDeferred = async { getChapters(manga, doc) }

		val desc = doc.selectFirstOrThrow(selectDesc).html()

		val stateDiv = doc.selectFirst(selectState)

		// Map the status label through the inherited ongoing/finished word sets.
		val state = stateDiv?.let {
			when (it.text()) {
				in ongoing -> MangaState.ONGOING
				in finished -> MangaState.FINISHED
				else -> null
			}
		}

		val alt = doc.body().selectFirst(selectAlt)?.text()?.replace("Other name", "")
		val auth = doc.body().selectFirst(selectAut)?.text()
		manga.copy(
			tags = doc.body().select(selectTag).mapNotNullToSet { a ->
				MangaTag(
					// NOTE(review): keeps the parent's "manga-list-genre-…"
					// key extraction although this site's selectors differ;
					// if genre hrefs don't match that pattern the key falls
					// back to the full href — verify against live markup.
					key = a.attr("href").substringAfter("manga-list-genre-").substringBeforeLast(".html"),
					title = a.text().toTitleCase(),
					source = source,
				)
			},
			description = desc,
			altTitle = alt,
			author = auth,
			state = state,
			chapters = chaptersDeferred.await(),
		)
	}

	/**
	 * Like the base implementation, but the date cell text is prefixed with
	 * "- ", which is stripped before parsing.
	 */
	override suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
		return doc.body().select(selectChapter).mapChapters(reversed = true) { i, a ->
			val href = a.attrAsRelativeUrl("href")
			val dateText = a.selectFirst(selectDate)?.text()?.substringAfter("- ")
			MangaChapter(
				id = generateUid(href),
				name = a.selectFirstOrThrow("div.chapter-name").text(),
				number = i + 1,
				url = href,
				uploadDate = parseChapterDate(
					dateFormat,
					dateText,
				),
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}
}
|
||||
@ -0,0 +1,131 @@
|
||||
package org.koitharu.kotatsu.parsers.site.fmreader.ja
|
||||
|
||||
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.Manga
|
||||
import org.koitharu.kotatsu.parsers.model.MangaChapter
|
||||
import org.koitharu.kotatsu.parsers.model.MangaPage
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.model.MangaTag
|
||||
import org.koitharu.kotatsu.parsers.model.RATING_UNKNOWN
|
||||
import org.koitharu.kotatsu.parsers.model.SortOrder
|
||||
import org.koitharu.kotatsu.parsers.site.fmreader.FmreaderParser
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.SimpleDateFormat
|
||||
|
||||
|
||||
/**
 * Klz9 (klz9.com) — Japanese FMReader site that serves both its chapter list
 * and its page list through AJAX controller endpoints instead of inline HTML.
 */
@MangaSourceParser("KLZ9", "Klz9", "ja")
internal class Klz9(context: MangaLoaderContext) :
	FmreaderParser(context, MangaSource.KLZ9, "klz9.com") {

	override val selectDesc = "div.row:contains(Description)"
	override val selectState = "ul.manga-info li:contains(Status) a"
	override val selectAlt = "ul.manga-info li:contains(Other name (s))"
	override val selectTag = "ul.manga-info li:contains(Genre(s)) a"
	// Chapter rows come from an AJAX document containing a bare <table>.
	override val selectChapter = "tr"
	override val selectDate = "td i"
	override val selectPage = "img"

	/**
	 * Same as the base implementation except for the URL prefix handling:
	 * listeurl is appended after an extra '/' and item hrefs get a '/'
	 * prepended.
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		val url = buildString {
			append("https://")
			append(domain)
			// NOTE(review): listeurl already starts with '/', so this yields
			// "https://klz9.com//manga-list.html" — presumably the server
			// tolerates the double slash; confirm before normalising.
			append("/$listeurl")
			append("?page=")
			append(page.toString())
			when {
				!query.isNullOrEmpty() -> {

					append("&name=")
					append(query.urlEncoded())
				}

				!tags.isNullOrEmpty() -> {
					append("&genre=")
					for (tag in tags) {
						append(tag.key)
					}
				}
			}

			append("&sort=")
			when (sortOrder) {
				SortOrder.POPULARITY -> append("views")
				SortOrder.UPDATED -> append("last_update")
				SortOrder.ALPHABETICAL -> append("name")
				else -> append("last_update")
			}
		}
		val doc = webClient.httpGet(url).parseHtml()

		return doc.select("div.thumb-item-flow").map { div ->
			// NOTE(review): a '/' is prepended to the relative href here,
			// unlike the base class — presumably this site's hrefs lack the
			// leading slash; verify against live markup.
			val href = "/" + div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
			Manga(
				id = generateUid(href),
				url = href,
				publicUrl = href.toAbsoluteUrl(div.host ?: domain),
				// Cover is set via an inline CSS background: url('...').
				coverUrl = div.selectFirstOrThrow("div.img-in-ratio").attr("style").substringAfter("('")
					.substringBeforeLast("')"),
				title = div.selectFirstOrThrow("div.series-title").text().orEmpty(),
				altTitle = null,
				rating = RATING_UNKNOWN,
				tags = emptySet(),
				author = null,
				state = null,
				source = source,
				isNsfw = isNsfwSource,
			)
		}
	}

	/**
	 * Chapters are fetched from the cont.listChapter.php controller, keyed by
	 * the manga slug read from the details page.
	 */
	override suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		val slug = doc.selectFirstOrThrow("div.h0rating").attr("slug")
		val docload =
			webClient.httpGet("https://$domain/app/manga/controllers/cont.listChapter.php?slug=$slug").parseHtml()
		val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
		return docload.body().select(selectChapter).mapChapters(reversed = true) { i, a ->
			val href = "/" + a.selectFirstOrThrow("a.chapter").attrAsRelativeUrl("href")
			val dateText = a.selectFirst(selectDate)?.text()
			MangaChapter(
				id = generateUid(href),
				name = a.selectFirstOrThrow("a").text(),
				number = i + 1,
				url = href,
				uploadDate = parseChapterDate(
					dateFormat,
					dateText,
				),
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}

	/**
	 * Page images are fetched from the cont.listImg.php controller, keyed by
	 * the chapter id read from the reader page's hidden #chapter input.
	 */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		val cid = doc.selectFirstOrThrow("#chapter").attr("value")
		val docload = webClient.httpGet("https://$domain/app/manga/controllers/cont.listImg.php?cid=$cid").parseHtml()

		return docload.select(selectPage).map { img ->
			val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")

			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}

}
|
||||
@ -0,0 +1,61 @@
|
||||
package org.koitharu.kotatsu.parsers.site.fmreader.ja
|
||||
|
||||
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.Manga
|
||||
import org.koitharu.kotatsu.parsers.model.MangaChapter
|
||||
import org.koitharu.kotatsu.parsers.model.MangaPage
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.fmreader.FmreaderParser
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.SimpleDateFormat
|
||||
|
||||
|
||||
/**
 * "WeLoveManga" source (Japanese), based on the FMReader site template.
 *
 * Unlike the base [FmreaderParser], this site serves its chapter list and its
 * page list through dedicated AJAX controller endpoints, so both lookups are
 * overridden here.
 */
@MangaSourceParser("WELOVEMANGA", "WeLoveManga", "ja")
internal class WeLoveManga(context: MangaLoaderContext) :
	FmreaderParser(context, MangaSource.WELOVEMANGA, "welovemanga.one") {

	override suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		// The manga id is exposed as the value of a hidden input inside div.cmt.
		val mid = doc.selectFirstOrThrow("div.cmt input").attr("value")
		// Chapter list is served by a separate AJAX controller, not the details page.
		val docload =
			webClient.httpGet("https://$domain/app/manga/controllers/cont.Listchapter.php?mid=$mid").parseHtml()
		val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
		// Newest chapters come first on the site; reverse so chapter numbering starts at 1.
		return docload.body().select(selectChapter).mapChapters(reversed = true) { i, a ->
			val href = a.selectFirstOrThrow("a").attrAsRelativeUrl("href")
			val dateText = a.selectFirst(selectDate)?.text()
			MangaChapter(
				id = generateUid(href),
				name = a.selectFirstOrThrow("a").text(),
				number = i + 1,
				url = href,
				uploadDate = parseChapterDate(
					dateFormat,
					dateText,
				),
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		// The chapter id lives in the value attribute of the #chapter element.
		val cid = doc.selectFirstOrThrow("#chapter").attr("value")
		val docload = webClient.httpGet("https://$domain/app/manga/controllers/cont.listImg.php?cid=$cid").parseHtml()
		// The controller response contains only the page images, so a bare "img"
		// selector is used here (the base class uses a narrower selectPage).
		return docload.select("img").map { img ->
			val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}
}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.fmreader.ja
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.fmreader.FmreaderParser
|
||||
|
||||
|
||||
/** FMReader-template source for weloma.art (Japanese); all behavior inherited from [FmreaderParser]. */
@MangaSourceParser("WELOMA", "Weloma", "ja")
internal class Weloma(context: MangaLoaderContext) :
	FmreaderParser(context, MangaSource.WELOMA, "weloma.art")
|
||||
@ -0,0 +1,184 @@
|
||||
package org.koitharu.kotatsu.parsers.site.fr
|
||||
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import okhttp3.Headers
|
||||
import org.json.JSONArray
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.network.UserAgents
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.util.*
|
||||
|
||||
/**
 * Parser for scansmangas.me (French).
 *
 * The site exposes no pagination on its listings (catalog, search, genres),
 * so only the first page ever yields results. Chapter pages embed the image
 * list as a JSON array in an inline script.
 */
@MangaSourceParser("SCANS_MANGAS_ME", "Scans Mangas Me", "fr")
internal class ScansMangasMe(context: MangaLoaderContext) :
	PagedMangaParser(context, MangaSource.SCANS_MANGAS_ME, 0) {

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.ALPHABETICAL,
		SortOrder.UPDATED,
		SortOrder.NEWEST,
		SortOrder.POPULARITY,
	)

	override val configKeyDomain = ConfigKey.Domain("scansmangas.me")

	// A desktop UA is required; the default client UA is rejected by the site.
	// TODO confirm: assumption inherited from the original implementation.
	override val headers: Headers = Headers.Builder()
		.add("User-Agent", UserAgents.CHROME_DESKTOP)
		.build()

	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		// FIX: the early return used to sit inside buildString (a non-local return
		// from the inline lambda); hoisting it makes the single-page contract obvious.
		if (page > 1) {
			return emptyList()
		}
		val url = buildString {
			append("https://")
			append(domain)
			when {
				!query.isNullOrEmpty() -> {
					append("/?s=")
					append(query.urlEncoded())
					append("&post_type=manga")
				}

				!tags.isNullOrEmpty() -> {
					append("/genres/")
					for (tag in tags) {
						append(tag.key)
					}
				}

				else -> {
					append("/tous-nos-mangas/")
					append("?order=")
					when (sortOrder) {
						SortOrder.POPULARITY -> append("popular")
						SortOrder.UPDATED -> append("update")
						SortOrder.ALPHABETICAL -> append("title")
						SortOrder.NEWEST -> append("create")
						else -> append("update")
					}
				}
			}
		}

		val doc = webClient.httpGet(url).parseHtml()

		return doc.select("div.postbody .bs .bsx").map { div ->
			val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
			Manga(
				id = generateUid(href),
				url = href,
				publicUrl = href.toAbsoluteUrl(div.host ?: domain),
				coverUrl = div.selectFirst("img")?.src().orEmpty(),
				title = div.selectFirstOrThrow("div.bigor div.tt").text(),
				altTitle = null,
				// Site scores appear to be out of 10; normalized to 0..1 here.
				rating = div.selectFirstOrThrow("div.rating i").ownText().toFloatOrNull()?.div(10f)
					?: RATING_UNKNOWN,
				tags = emptySet(),
				author = null,
				state = null,
				source = source,
				isNsfw = isNsfwSource,
			)
		}
	}

	override suspend fun getTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain/tous-nos-mangas/").parseHtml()
		return doc.select("ul.genre li").mapNotNullToSet { li ->
			// The tag key is the last path segment of the genre link.
			val key = li.selectFirstOrThrow("a").attr("href").removeSuffix('/').substringAfterLast('/')
			val name = li.selectFirstOrThrow("a").text()
			MangaTag(
				key = key,
				title = name,
				source = source,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
		val fullUrl = manga.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		// getChapters is synchronous — no async wrapper needed.
		val chapters = getChapters(doc)

		val desc = doc.selectFirstOrThrow("div.desc").html()

		// FIX: Elements is never null — isEmpty() replaces the redundant isNullOrEmpty().
		// "En cours" is the site's French label for an ongoing series.
		val state = if (doc.select("div.spe span:contains(En cours)").isEmpty()) {
			MangaState.FINISHED
		} else {
			MangaState.ONGOING
		}

		val alt = doc.body().select("div.infox span.alter").text()

		// FIX: guard against a short span list instead of crashing with
		// IndexOutOfBoundsException when the info block lacks the author entry.
		val author = doc.select("div.spe span").getOrNull(2)?.text()?.replace("Auteur:", "")

		manga.copy(
			tags = doc.select("div.spe span:contains(Genres) a").mapNotNullToSet { a ->
				MangaTag(
					key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
					title = a.text().toTitleCase(),
					source = source,
				)
			},
			description = desc,
			altTitle = alt,
			author = author,
			state = state,
			chapters = chapters,
			isNsfw = manga.isNsfw,
		)
	}

	// Builds the chapter list from the details page; the site provides no upload dates.
	private fun getChapters(doc: Document): List<MangaChapter> {
		return doc.body().requireElementById("chapter_list").select("li").mapChapters(reversed = true) { i, li ->
			val a = li.selectFirstOrThrow("a")
			val href = a.attrAsRelativeUrl("href")
			MangaChapter(
				id = generateUid(href),
				// NOTE(review): "span.mobile chapter" selects <chapter> descendants of
				// span.mobile, which is not a standard HTML tag — confirm against the
				// live markup (possibly "span.mobile-chapter" was intended).
				name = li.selectFirstOrThrow("span.mobile chapter").text(),
				number = i + 1,
				url = href,
				uploadDate = 0,
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		// Pages are embedded as a JSON array in an inline script: `var pages = [...];`
		val script = doc.selectFirstOrThrow("script:containsData(page_image)")
		val images = JSONArray(script.data().substringAfterLast("var pages = ").substringBefore(';'))

		return (0 until images.length()).map { i ->
			// FIX: read "page_image" once instead of twice per page.
			val imageUrl = images.getJSONObject(i).getString("page_image")
			MangaPage(
				id = generateUid(imageUrl),
				url = imageUrl,
				preview = null,
				source = source,
			)
		}
	}
}
|
||||
@ -0,0 +1,15 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.es
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
|
||||
/**
 * Madara-template source for lkscanlation.com (Spanish).
 * Overrides the URL prefixes the site uses for genre pages and the catalog.
 */
@MangaSourceParser("LKSCANLATION", "LkScanlation", "es")
internal class LkScanlation(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.LKSCANLATION, "lkscanlation.com") {

	override val tagPrefix = "manhwa-genre/"
	override val listUrl = "manhwa/"
}
|
||||
@ -0,0 +1,162 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.vi
|
||||
|
||||
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
|
||||
|
||||
/**
 * Madara-template source for saytruyenhay.com (Vietnamese).
 *
 * Deviates from the standard Madara behavior: listings live under /public/…,
 * pagination is query-string based (no AJAX, [withoutAjax] = true), and the
 * details page uses the multilingual "post-content_item" status markup.
 */
@MangaSourceParser("SAYTRUYENHAY", "Saytruyenhay", "vi")
internal class Saytruyenhay(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.SAYTRUYENHAY, "saytruyenhay.com", 40) {

	override val tagPrefix = "genre/"
	override val withoutAjax = true
	override val listUrl = "public/genre/manga/"

	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {

		val url = buildString {
			append("https://")
			append(domain)
			// The site's ?page= parameter is 1-based while the paginator supplies
			// a 0-based index — presumably; TODO confirm against the base class.
			val pages = page + 1

			when {
				!query.isNullOrEmpty() -> {
					append("/public/search?s=")
					append(query.urlEncoded())
					append("&page=")
					append(pages.toString())
				}

				!tags.isNullOrEmpty() -> {
					append("/$tagPrefix")
					for (tag in tags) {
						append(tag.key)
					}
					append("?page=")
					append(pages.toString())
				}

				else -> {
					append("/$listUrl")
					append("?page=")
					append(pages.toString())
				}
			}
			// Madara-style sort parameter, appended to every listing variant.
			append("&m_orderby=")
			when (sortOrder) {
				SortOrder.POPULARITY -> append("views")
				SortOrder.UPDATED -> append("latest")
				SortOrder.NEWEST -> append("new-manga")
				SortOrder.ALPHABETICAL -> append("alphabet")
				else -> append("latest")
			}
		}
		val doc = webClient.httpGet(url).parseHtml()

		// Search results and catalog pages use different item markup; try both.
		return doc.select("div.row.c-tabs-item__content").ifEmpty {
			doc.select("div.page-item-detail")
		}.map { div ->
			val href = div.selectFirst("a")?.attrAsRelativeUrlOrNull("href") ?: div.parseFailed("Link not found")
			val summary = div.selectFirst(".tab-summary") ?: div.selectFirst(".item-summary")
			Manga(
				id = generateUid(href),
				url = href,
				publicUrl = href.toAbsoluteUrl(div.host ?: domain),
				coverUrl = div.selectFirst("img")?.src().orEmpty(),
				title = (summary?.selectFirst("h3") ?: summary?.selectFirst("h4"))?.text().orEmpty(),
				altTitle = null,
				// Vote average appears to be out of 5; normalized to 0..1.
				rating = div.selectFirst("span.total_votes")?.ownText()?.toFloatOrNull()?.div(5f) ?: -1f,
				tags = summary?.selectFirst(".mg_genres")?.select("a")?.mapNotNullToSet { a ->
					MangaTag(
						key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
						title = a.text().ifEmpty { return@mapNotNullToSet null }.toTitleCase(),
						source = source,
					)
				}.orEmpty(),
				author = summary?.selectFirst(".mg_author")?.selectFirst("a")?.ownText(),
				state = when (summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")?.ownText()
					?.lowercase()) {
					in ongoing -> MangaState.ONGOING
					in finished -> MangaState.FINISHED
					else -> null
				},
				source = source,
				isNsfw = isNsfwSource,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
		val fullUrl = manga.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		val body = doc.body()

		// Fetch chapters concurrently while the rest of the page is parsed.
		val chaptersDeferred = async { getChapters(manga, doc) }

		// Prefer paragraph-by-paragraph text when present; fall back to plain text.
		val desc = body.select(selectDesc).let {
			if (it.select("p").text().isNotEmpty()) {
				it.select("p").joinToString(separator = "\n\n") { p ->
					p.text().replace("<br>", "\n")
				}
			} else {
				it.text()
			}
		}

		// The "Status" label is localized; probe every translation the Madara
		// template is known to ship before giving up.
		val stateDiv = (body.selectFirst("div.post-content_item:contains(Status)")
			?: body.selectFirst("div.post-content_item:contains(Statut)")
			?: body.selectFirst("div.post-content_item:contains(État)")
			?: body.selectFirst("div.post-content_item:contains(حالة العمل)")
			?: body.selectFirst("div.post-content_item:contains(Estado)")
			?: body.selectFirst("div.post-content_item:contains(สถานะ)")
			?: body.selectFirst("div.post-content_item:contains(Stato)")
			?: body.selectFirst("div.post-content_item:contains(Durum)")
			?: body.selectFirst("div.post-content_item:contains(Statüsü)")
			?: body.selectFirst("div.post-content_item:contains(Статус)")
			?: body.selectFirst("div.post-content_item:contains(状态)")
			?: body.selectFirst("div.post-content_item:contains(الحالة)"))?.selectLast("div.summary-content")

		val state = stateDiv?.let {
			when (it.text()) {
				in ongoing -> MangaState.ONGOING
				in finished -> MangaState.FINISHED
				else -> null
			}
		}

		// Alternative titles appear under two different labels depending on the theme build.
		val alt =
			doc.body().select(".post-content_item:contains(Alt) .summary-content").firstOrNull()?.tableValue()?.text()
				?.trim() ?: doc.body().select(".post-content_item:contains(Nomes alternativos: ) .summary-content")
				.firstOrNull()?.tableValue()?.text()?.trim()

		manga.copy(
			tags = doc.body().select(selectGenre).mapNotNullToSet { a ->
				MangaTag(
					key = a.attr("href").removeSuffix("/").substringAfterLast('/'),
					title = a.text().toTitleCase(),
					source = source,
				)
			},
			description = desc,
			altTitle = alt,
			state = state,
			chapters = chaptersDeferred.await(),
		)
	}

}
|
||||
@ -0,0 +1,298 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madtheme
|
||||
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
/**
 * Base parser for sites built on the "MadTheme" template.
 *
 * Subclasses override the protected `select*` selectors and URL fragments to
 * adapt to per-site markup differences; the listing, tag, detail, chapter and
 * page scraping flows are shared here.
 */
internal abstract class MadthemeParser(
	context: MangaLoaderContext,
	source: MangaSource,
	domain: String,
	pageSize: Int = 48,
) : PagedMangaParser(context, source, pageSize) {

	override val configKeyDomain = ConfigKey.Domain(domain)

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.UPDATED,
		SortOrder.POPULARITY,
		SortOrder.ALPHABETICAL,
		SortOrder.NEWEST,
		SortOrder.RATING,
	)

	// Relative path of the catalog/search page.
	protected open val listUrl = "search/"

	// Pattern for absolute chapter dates, e.g. "Jan 01, 2023".
	protected open val datePattern = "MMM dd, yyyy"

	init {
		paginator.firstPage = 1
		searchPaginator.firstPage = 1
	}

	// Status labels the template renders for an ongoing series.
	@JvmField
	protected val ongoing: Set<String> = setOf(
		"On Going",
		"Ongoing",
		"ONGOING",
	)

	// Status labels the template renders for a completed series.
	@JvmField
	protected val finished: Set<String> = setOf(
		"Completed",
		"COMPLETED",
	)

	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		val url = buildString {
			append("https://")
			append(domain)
			append("/$listUrl?sort=")
			when (sortOrder) {
				SortOrder.POPULARITY -> append("views")
				SortOrder.UPDATED -> append("updated_at")
				SortOrder.ALPHABETICAL -> append("name") // On some sites without tags or searches, the alphabetical option is empty.
				SortOrder.NEWEST -> append("created_at")
				SortOrder.RATING -> append("rating")
			}
			if (!query.isNullOrEmpty()) {
				append("&q=")
				append(query.urlEncoded())
			}

			if (!tags.isNullOrEmpty()) {
				// Genres are passed as repeated genre[]=<key> parameters.
				for (tag in tags) {
					append("&")
					append("genre[]".urlEncoded())
					append("=")
					append(tag.key)
				}
			}

			append("&page=")
			append(page.toString())
		}

		val doc = webClient.httpGet(url).parseHtml()

		return doc.select("div.book-item").map { div ->
			val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
			Manga(
				id = generateUid(href),
				url = href,
				publicUrl = href.toAbsoluteUrl(div.host ?: domain),
				coverUrl = div.selectFirst("img")?.src().orEmpty(),
				title = div.selectFirstOrThrow("div.meta").selectFirst("div.title")?.text().orEmpty(),
				altTitle = null,
				rating = div.selectFirstOrThrow("div.meta span.score").ownText().toFloatOrNull()?.div(5f)
					?: RATING_UNKNOWN,
				// FIX: previously selected from doc.body(), which attached every genre
				// on the whole page to every list item; scope the lookup to this item.
				tags = div.select("div.meta div.genres span").mapNotNullToSet { span ->
					MangaTag(
						key = span.attr("class"),
						title = span.text().toTitleCase(),
						source = source,
					)
				},
				author = null,
				state = null,
				source = source,
				isNsfw = isNsfwSource,
			)
		}
	}

	override suspend fun getTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain/$listUrl").parseHtml()
		return doc.select("div.genres label.checkbox").mapNotNullToSet { checkbox ->
			// FIX: attr() never returns null (it yields "" when the attribute is
			// absent), so the old `?:` fallback was dead code; filter empties explicitly.
			val key = checkbox.selectFirstOrThrow("input").attr("value")
				.takeUnless { it.isEmpty() } ?: return@mapNotNullToSet null
			val name = checkbox.selectFirstOrThrow("span.radio__label").text()
			MangaTag(
				key = key,
				title = name,
				source = source,
			)
		}
	}

	protected open val selectDesc = "div.section-body.summary p.content"
	protected open val selectState = "div.detail p:contains(Status) span"
	protected open val selectAlt = "div.detail div.name h2"
	protected open val selectTag = "div.detail p:contains(Genres) a"

	override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
		val fullUrl = manga.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		// Fetch chapters concurrently while the rest of the page is parsed.
		val chaptersDeferred = async { getChapters(manga, doc) }

		val desc = doc.selectFirstOrThrow(selectDesc).html()

		val stateDiv = doc.selectFirst(selectState)

		val state = stateDiv?.let {
			when (it.text()) {
				in ongoing -> MangaState.ONGOING
				in finished -> MangaState.FINISHED
				else -> null
			}
		}

		val alt = doc.body().select(selectAlt).text()

		// The template shows an adult-content warning element on NSFW titles.
		val nsfw = doc.getElementById("adt-warning") != null

		manga.copy(
			tags = doc.body().select(selectTag).mapNotNullToSet { a ->
				MangaTag(
					key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
					title = a.text().toTitleCase().replace(",", ""),
					source = source,
				)
			},
			description = desc,
			altTitle = alt,
			state = state,
			chapters = chaptersDeferred.await(),
			isNsfw = nsfw || manga.isNsfw,
		)
	}

	protected open val selectDate = "div .chapter-update"
	protected open val selectChapter = "ul#chapter-list li"

	// Builds the chapter list from the details page; newest-first on the site,
	// reversed so chapter numbering starts at 1.
	protected open suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
		return doc.body().select(selectChapter).mapChapters(reversed = true) { i, li ->
			val a = li.selectFirstOrThrow("a")
			val href = a.attrAsRelativeUrl("href")
			val dateText = li.selectFirst(selectDate)?.text()
			MangaChapter(
				id = generateUid(href),
				name = li.selectFirstOrThrow(".chapter-title").text(),
				number = i + 1,
				url = href,
				uploadDate = parseChapterDate(
					dateFormat,
					dateText,
				),
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}

	protected open val selectPage = "div#chapter-images img"

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		return doc.select(selectPage).map { img ->
			val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}

	/**
	 * Parses a chapter date string, handling relative forms ("2 days ago"),
	 * "today", ordinal suffixes ("5th December 2019") and plain [dateFormat]
	 * dates. Returns 0 when [date] is null or unparseable.
	 */
	protected fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
		// Clean date (e.g. 5th December 2019 to 5 December 2019) before parsing it
		val d = date?.lowercase() ?: return 0
		return when {
			d.endsWith(" ago") ||
				// short Hours
				d.endsWith(" h") ||
				// short Day
				d.endsWith(" d") -> parseRelativeDate(date)

			// NOTE(review): the comment says 'yesterday', but "yesterday" does not
			// start with "year" — confirm the exact relative-date wording the site
			// emits before changing this condition.
			d.startsWith("year") -> Calendar.getInstance().apply {
				add(Calendar.DAY_OF_MONTH, -1) // yesterday
				set(Calendar.HOUR_OF_DAY, 0)
				set(Calendar.MINUTE, 0)
				set(Calendar.SECOND, 0)
				set(Calendar.MILLISECOND, 0)
			}.timeInMillis

			d.startsWith("today") -> Calendar.getInstance().apply {
				set(Calendar.HOUR_OF_DAY, 0)
				set(Calendar.MINUTE, 0)
				set(Calendar.SECOND, 0)
				set(Calendar.MILLISECOND, 0)
			}.timeInMillis

			// Strip ordinal suffixes (1st, 2nd, 3rd, 4th…) so dateFormat can parse.
			date.contains(Regex("""\d(st|nd|rd|th)""")) -> date.split(" ").map {
				if (it.contains(Regex("""\d\D\D"""))) {
					it.replace(Regex("""\D"""), "")
				} else {
					it
				}
			}.let { dateFormat.tryParse(it.joinToString(" ")) }

			else -> dateFormat.tryParse(date)
		}
	}

	// Parses dates in this form:
	// 21 hours ago
	private fun parseRelativeDate(date: String): Long {
		val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
		val cal = Calendar.getInstance()

		return when {
			WordSet("day", "days").anyWordIn(date) ->
				cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis

			WordSet("hour", "hours", "h").anyWordIn(date) ->
				cal.apply { add(Calendar.HOUR, -number) }.timeInMillis

			WordSet("min", "minute", "minutes").anyWordIn(date) ->
				cal.apply { add(Calendar.MINUTE, -number) }.timeInMillis

			WordSet("second").anyWordIn(date) ->
				cal.apply { add(Calendar.SECOND, -number) }.timeInMillis

			WordSet("month", "months").anyWordIn(date) ->
				cal.apply { add(Calendar.MONTH, -number) }.timeInMillis

			WordSet("year").anyWordIn(date) ->
				cal.apply { add(Calendar.YEAR, -number) }.timeInMillis

			else -> 0
		}
	}

}
|
||||
@ -0,0 +1,16 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madtheme.en
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madtheme.MadthemeParser
|
||||
|
||||
|
||||
/**
 * MadTheme-template source for beehentai.com (English, adult content).
 * Overrides the description selector: this site has no p.content element.
 */
@MangaSourceParser("BEEHENTAI", "BeeHentai", "en", ContentType.HENTAI)
internal class BeeHentai(context: MangaLoaderContext) :
	MadthemeParser(context, MangaSource.BEEHENTAI, "beehentai.com") {

	override val selectDesc = "div.section-body"
}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madtheme.en
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madtheme.MadthemeParser
|
||||
|
||||
|
||||
/** MadTheme-template source for mangabuddy.com (English); all behavior inherited from [MadthemeParser]. */
@MangaSourceParser("MANGABUDDY", "MangaBuddy", "en")
internal class MangaBuddy(context: MangaLoaderContext) :
	MadthemeParser(context, MangaSource.MANGABUDDY, "mangabuddy.com")
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madtheme.en
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madtheme.MadthemeParser
|
||||
|
||||
|
||||
/** MadTheme-template source for mangacute.com (English); all behavior inherited from [MadthemeParser]. */
@MangaSourceParser("MANGACUTE", "MangaCute", "en")
internal class MangaCute(context: MangaLoaderContext) :
	MadthemeParser(context, MangaSource.MANGACUTE, "mangacute.com")
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madtheme.en
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madtheme.MadthemeParser
|
||||
|
||||
|
||||
/** MadTheme-template source for mangaforest.me (English); all behavior inherited from [MadthemeParser]. */
@MangaSourceParser("MANGAFOREST", "Manga Forest", "en")
internal class MangaForest(context: MangaLoaderContext) :
	MadthemeParser(context, MangaSource.MANGAFOREST, "mangaforest.me")
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madtheme.en
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madtheme.MadthemeParser
|
||||
|
||||
|
||||
/** MadTheme-template source for mangapuma.com (English); all behavior inherited from [MadthemeParser]. */
@MangaSourceParser("MANGAPUMA", "Manga Puma", "en")
internal class MangaPuma(context: MangaLoaderContext) :
	MadthemeParser(context, MangaSource.MANGAPUMA, "mangapuma.com")
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madtheme.en
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madtheme.MadthemeParser
|
||||
|
||||
|
||||
/** MadTheme-template source for mangaxyz.com (English); all behavior inherited from [MadthemeParser]. */
@MangaSourceParser("MANGAXYZ", "Mangaxyz", "en")
internal class Mangaxyz(context: MangaLoaderContext) :
	MadthemeParser(context, MangaSource.MANGAXYZ, "mangaxyz.com")
|
||||
@ -0,0 +1,16 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madtheme.en
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madtheme.MadthemeParser
|
||||
|
||||
|
||||
/**
 * MadTheme-template source for toonitube.com (English, adult content).
 * Overrides the description selector for this site's summary markup.
 */
@MangaSourceParser("TOONITUBE", "TooniTube", "en", ContentType.HENTAI)
internal class TooniTube(context: MangaLoaderContext) :
	MadthemeParser(context, MangaSource.TOONITUBE, "toonitube.com") {

	override val selectDesc = "div.summary div.section-body p.content"
}
|
||||
@ -0,0 +1,16 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madtheme.en
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madtheme.MadthemeParser
|
||||
|
||||
|
||||
/**
 * MadTheme-template source for toonily.me (English, adult content).
 * Overrides the description selector for this site's summary markup.
 */
@MangaSourceParser("TOONILY_ME", "Toonily Me", "en", ContentType.HENTAI)
internal class ToonilyMe(context: MangaLoaderContext) :
	MadthemeParser(context, MangaSource.TOONILY_ME, "toonily.me") {

	override val selectDesc = "div.summary div.section-body p.content"
}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madtheme.en
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madtheme.MadthemeParser
|
||||
|
||||
|
||||
/** MadTheme-template source for truemanga.com (English); all behavior inherited from [MadthemeParser]. */
@MangaSourceParser("TRUEMANGA", "True Manga", "en")
internal class TrueManga(context: MangaLoaderContext) :
	MadthemeParser(context, MangaSource.TRUEMANGA, "truemanga.com")
|
||||
@ -0,0 +1,313 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangabox
|
||||
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
internal abstract class MangaboxParser(
|
||||
context: MangaLoaderContext,
|
||||
source: MangaSource,
|
||||
pageSize: Int = 24,
|
||||
) : PagedMangaParser(context, source, pageSize) {
|
||||
|
||||
override val sortOrders: Set<SortOrder> = EnumSet.of(
|
||||
SortOrder.UPDATED,
|
||||
SortOrder.POPULARITY,
|
||||
SortOrder.NEWEST,
|
||||
)
|
||||
|
||||
protected open val listUrl = "/genre-all"
|
||||
protected open val searchUrl = "/search/story/"
|
||||
protected open val datePattern = "MMM dd,yy"
|
||||
|
||||
|
||||
init {
|
||||
paginator.firstPage = 1
|
||||
searchPaginator.firstPage = 1
|
||||
}
|
||||
|
||||
|
||||
@JvmField
|
||||
protected val ongoing: Set<String> = setOf(
|
||||
"Ongoing",
|
||||
)
|
||||
|
||||
@JvmField
|
||||
protected val finished: Set<String> = setOf(
|
||||
"Completed",
|
||||
)
|
||||
|
||||
override suspend fun getListPage(
|
||||
page: Int,
|
||||
query: String?,
|
||||
tags: Set<MangaTag>?,
|
||||
sortOrder: SortOrder,
|
||||
): List<Manga> {
|
||||
val url = buildString {
|
||||
append("https://")
|
||||
append(domain)
|
||||
|
||||
if (!query.isNullOrEmpty()) {
|
||||
append(searchUrl)
|
||||
append(query.urlEncoded())
|
||||
append("?page=")
|
||||
append(page.toString())
|
||||
|
||||
} else if (!tags.isNullOrEmpty()) {
|
||||
append("/")
|
||||
for (tag in tags) {
|
||||
append(tag.key)
|
||||
}
|
||||
append("/")
|
||||
append(page.toString())
|
||||
} else {
|
||||
append("$listUrl/")
|
||||
if (page > 1) {
|
||||
append(page.toString())
|
||||
}
|
||||
when (sortOrder) {
|
||||
SortOrder.POPULARITY -> append("?type=topview")
|
||||
SortOrder.UPDATED -> append("")
|
||||
SortOrder.NEWEST -> append("?type=newest")
|
||||
else -> append("")
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
val doc = webClient.httpGet(url).parseHtml()
|
||||
|
||||
return doc.select("div.content-genres-item, div.list-story-item").ifEmpty {
|
||||
doc.select("div.search-story-item")
|
||||
}.map { div ->
|
||||
val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
|
||||
Manga(
|
||||
id = generateUid(href),
|
||||
url = href,
|
||||
publicUrl = href.toAbsoluteUrl(div.host ?: domain),
|
||||
coverUrl = div.selectFirst("img")?.src().orEmpty(),
|
||||
title = div.selectFirstOrThrow("h3").text().orEmpty(),
|
||||
altTitle = null,
|
||||
rating = RATING_UNKNOWN,
|
||||
tags = emptySet(),
|
||||
author = null,
|
||||
state = null,
|
||||
source = source,
|
||||
isNsfw = isNsfwSource,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
protected open val selectTagMap = "div.panel-genres-list a:not(.genres-select)"
|
||||
|
||||
override suspend fun getTags(): Set<MangaTag> {
|
||||
val doc = webClient.httpGet("https://$domain/$listUrl").parseHtml()
|
||||
return doc.select(selectTagMap).mapNotNullToSet { a ->
|
||||
val key = a.attr("href").removeSuffix('/').substringAfterLast('/')
|
||||
val name = a.attr("title").replace(" Manga", "")
|
||||
MangaTag(
|
||||
key = key,
|
||||
title = name,
|
||||
source = source,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
    // CSS selectors for the details page; each has alternatives for the two
    // markup variants used across Mangabox-family sites. Subclasses override per site.
    protected open val selectDesc = "div#noidungm, div#panel-story-info-description"
    protected open val selectState = "li:contains(status), td:containsOwn(status) + td"
    protected open val selectAlt = ".story-alternative, tr:has(.info-alternative) h2"
    protected open val selectAut = "li:contains(author) a, td:contains(author) + td a"
    protected open val selectTag = "div.manga-info-top li:contains(genres) a , td:containsOwn(genres) + td a"

    /**
     * Loads full details for [manga]: description, status, alt title, authors,
     * tags and the chapter list (fetched concurrently with the detail scraping).
     */
    override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
        val fullUrl = manga.url.toAbsoluteUrl(domain)
        val doc = webClient.httpGet(fullUrl).parseHtml()

        // Parse chapters in parallel while the rest of the page is scraped.
        val chaptersDeferred = async { getChapters(manga, doc) }

        val desc = doc.selectFirstOrThrow(selectDesc).html()

        val stateDiv = doc.select(selectState).text()

        // Map the raw status string onto MangaState via the ongoing/finished sets.
        val state = stateDiv.let {
            when (it) {
                in ongoing -> MangaState.ONGOING
                in finished -> MangaState.FINISHED
                else -> null
            }
        }

        val alt = doc.body().select(selectAlt).text().replace("Alternative : ", "")

        val aut = doc.body().select(selectAut).eachText().joinToString()

        manga.copy(
            tags = doc.body().select(selectTag).mapNotNullToSet { a ->
                MangaTag(
                    // Tag key is the value of the "category" query parameter in the genre link.
                    key = a.attr("href").substringAfterLast("category=").substringBefore("&"),
                    title = a.text().toTitleCase(),
                    source = source,
                )
            },
            description = desc,
            altTitle = alt,
            author = aut,
            state = state,
            chapters = chaptersDeferred.await(),
            isNsfw = manga.isNsfw,
        )
    }
|
||||
|
||||
    // Selector for the element holding the upload date inside a chapter row.
    protected open val selectDate = "span"
    // Selector for chapter rows, covering both markup variants of the family.
    protected open val selectChapter = "div.chapter-list div.row, ul.row-content-chapter li"

    /**
     * Parses the chapter list from an already-loaded details page.
     * Rows are reversed so chapters are ordered oldest-first.
     */
    protected open suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
        val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
        return doc.body().select(selectChapter).mapChapters(reversed = true) { i, li ->
            val a = li.selectFirstOrThrow("a")
            val href = a.attrAsRelativeUrl("href")
            // The date is the last matching element in the row (earlier ones hold other info).
            val dateText = li.select(selectDate).last()?.text()

            MangaChapter(
                id = generateUid(href),
                name = a.text(),
                number = i + 1,
                url = href,
                uploadDate = parseChapterDate(
                    dateFormat,
                    dateText,
                ),
                source = source,
                scanlator = null,
                branch = null,
            )
        }
    }
|
||||
|
||||
protected open val selectPage = "div#vungdoc img, div.container-chapter-reader img"
|
||||
|
||||
protected open val otherDomain = ""
|
||||
|
||||
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
|
||||
val fullUrl = chapter.url.toAbsoluteUrl(domain)
|
||||
val doc = webClient.httpGet(fullUrl).parseHtml()
|
||||
|
||||
if (doc.select(selectPage).isNullOrEmpty()) {
|
||||
val fullUrl2 = chapter.url.toAbsoluteUrl(domain).replace(domain, otherDomain)
|
||||
val doc2 = webClient.httpGet(fullUrl2).parseHtml()
|
||||
|
||||
return doc2.select(selectPage).map { img ->
|
||||
val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
|
||||
|
||||
MangaPage(
|
||||
id = generateUid(url),
|
||||
url = url,
|
||||
preview = null,
|
||||
source = source,
|
||||
)
|
||||
}
|
||||
} else {
|
||||
return doc.select(selectPage).map { img ->
|
||||
val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
|
||||
|
||||
MangaPage(
|
||||
id = generateUid(url),
|
||||
url = url,
|
||||
preview = null,
|
||||
source = source,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
protected fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
|
||||
// Clean date (e.g. 5th December 2019 to 5 December 2019) before parsing it
|
||||
val d = date?.lowercase() ?: return 0
|
||||
return when {
|
||||
d.endsWith(" ago") ||
|
||||
// short Hours
|
||||
d.endsWith(" h") ||
|
||||
// short Day
|
||||
d.endsWith(" d") -> parseRelativeDate(date)
|
||||
|
||||
// Handle 'yesterday' and 'today', using midnight
|
||||
d.startsWith("year") -> Calendar.getInstance().apply {
|
||||
add(Calendar.DAY_OF_MONTH, -1) // yesterday
|
||||
set(Calendar.HOUR_OF_DAY, 0)
|
||||
set(Calendar.MINUTE, 0)
|
||||
set(Calendar.SECOND, 0)
|
||||
set(Calendar.MILLISECOND, 0)
|
||||
}.timeInMillis
|
||||
|
||||
d.startsWith("today") -> Calendar.getInstance().apply {
|
||||
set(Calendar.HOUR_OF_DAY, 0)
|
||||
set(Calendar.MINUTE, 0)
|
||||
set(Calendar.SECOND, 0)
|
||||
set(Calendar.MILLISECOND, 0)
|
||||
}.timeInMillis
|
||||
|
||||
date.contains(Regex("""\d(st|nd|rd|th)""")) -> date.split(" ").map {
|
||||
if (it.contains(Regex("""\d\D\D"""))) {
|
||||
it.replace(Regex("""\D"""), "")
|
||||
} else {
|
||||
it
|
||||
}
|
||||
}.let { dateFormat.tryParse(it.joinToString(" ")) }
|
||||
|
||||
else -> dateFormat.tryParse(date)
|
||||
}
|
||||
}
|
||||
|
||||
// Parses dates in this form:
|
||||
// 21 hours ago
|
||||
private fun parseRelativeDate(date: String): Long {
|
||||
val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
|
||||
val cal = Calendar.getInstance()
|
||||
|
||||
return when {
|
||||
WordSet(
|
||||
"day",
|
||||
"days",
|
||||
).anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis
|
||||
|
||||
WordSet("hour", "hours", "h").anyWordIn(date) -> cal.apply {
|
||||
add(
|
||||
Calendar.HOUR,
|
||||
-number,
|
||||
)
|
||||
}.timeInMillis
|
||||
|
||||
WordSet(
|
||||
"min",
|
||||
"minute",
|
||||
"minutes",
|
||||
).anyWordIn(date) -> cal.apply {
|
||||
add(
|
||||
Calendar.MINUTE,
|
||||
-number,
|
||||
)
|
||||
}.timeInMillis
|
||||
|
||||
WordSet("second").anyWordIn(date) -> cal.apply {
|
||||
add(
|
||||
Calendar.SECOND,
|
||||
-number,
|
||||
)
|
||||
}.timeInMillis
|
||||
|
||||
WordSet("month", "months").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
|
||||
WordSet("year").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
|
||||
else -> 0
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,22 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangabox.en
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangabox.MangaboxParser
|
||||
|
||||
// Mangabat: Mangabox-family source; only overrides domains, URL paths and the tag selector.
@MangaSourceParser("HMANGABAT", "Mangabat", "en")
internal class Mangabat(context: MangaLoaderContext) :
    MangaboxParser(context, MangaSource.HMANGABAT) {

    override val configKeyDomain = ConfigKey.Domain("h.mangabat.com", "readmangabat.com")

    // Fallback domain used by getPages when the primary domain serves no images.
    override val otherDomain = "readmangabat.com"

    override val searchUrl = "/search/manga/"

    override val listUrl = "/manga-list-all"
    override val selectTagMap = "div.panel-category p.pn-category-row:not(.pn-category-row-border) a"
}
|
||||
@ -0,0 +1,159 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangabox.en
|
||||
|
||||
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.Manga
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.model.MangaState
|
||||
import org.koitharu.kotatsu.parsers.model.MangaTag
|
||||
import org.koitharu.kotatsu.parsers.model.RATING_UNKNOWN
|
||||
import org.koitharu.kotatsu.parsers.model.SortOrder
|
||||
import org.koitharu.kotatsu.parsers.site.mangabox.MangaboxParser
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
|
||||
|
||||
/**
 * Mangairo: Mangabox-family source with a different URL scheme
 * (path segments /type-… /ctg-… /state-all/page-N instead of query parameters)
 * and its own details-page selectors.
 */
@MangaSourceParser("MANGAIRO", "Mangairo", "en")
internal class Mangairo(context: MangaLoaderContext) :
    MangaboxParser(context, MangaSource.MANGAIRO) {

    override val configKeyDomain = ConfigKey.Domain("w.mangairo.com", "chap.mangairo.com")

    // Fallback domain used by getPages when the primary domain serves no images.
    override val otherDomain = "chap.mangairo.com"

    override val datePattern = "MMM-dd-yy"
    override val listUrl = "/manga-list"
    override val searchUrl = "/list/search/"

    override val selectDesc = "div#story_discription p"
    override val selectState = "ul.story_info_right li:contains(Status) a"
    override val selectAlt = "ul.story_info_right li:contains(Alter) h2"
    override val selectAut = "ul.story_info_right li:contains(Author) a"
    override val selectTag = "ul.story_info_right li:contains(Genres) a"

    override val selectChapter = "div.chapter_list li"
    override val selectDate = "p"

    override val selectPage = "div.panel-read-story img"

    /**
     * Catalogue URL layout:
     *  - search: https://domain{searchUrl}{query}?page=N
     *  - browse: https://domain{listUrl}/type-{sort}/ctg-{tag|all}/state-all/page-N
     */
    override suspend fun getListPage(
        page: Int,
        query: String?,
        tags: Set<MangaTag>?,
        sortOrder: SortOrder,
    ): List<Manga> {
        val url = buildString {
            append("https://")
            append(domain)
            if (!query.isNullOrEmpty()) {
                append(searchUrl)
                append(query.urlEncoded())
                append("?page=")
                append(page.toString())
            } else {
                // FIX: was append("$listUrl/") followed by append("/type-"),
                // which produced a double slash ("/manga-list//type-…").
                append(listUrl)
                append("/type-")
                when (sortOrder) {
                    SortOrder.POPULARITY -> append("topview")
                    SortOrder.UPDATED -> append("latest")
                    SortOrder.NEWEST -> append("newest")
                    else -> append("latest")
                }
                if (!tags.isNullOrEmpty()) {
                    append("/ctg-")
                    for (tag in tags) {
                        append(tag.key)
                    }
                } else {
                    append("/ctg-all")
                }
                append("/state-all/page-")
                append(page.toString())
            }
        }

        val doc = webClient.httpGet(url).parseHtml()

        return doc.select("div.story-item").map { div ->
            val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
            Manga(
                id = generateUid(href),
                url = href,
                publicUrl = href.toAbsoluteUrl(div.host ?: domain),
                coverUrl = div.selectFirst("img")?.src().orEmpty(),
                title = (div.selectFirst("h2")?.text() ?: div.selectFirst("h3")?.text()).orEmpty(),
                altTitle = null,
                rating = RATING_UNKNOWN,
                tags = emptySet(),
                author = null,
                state = null,
                source = source,
                isNsfw = isNsfwSource,
            )
        }
    }

    override suspend fun getTags(): Set<MangaTag> {
        // FIX: was "https://$domain/$listUrl/…" — listUrl starts with '/' already,
        // so the extra slash produced "https://domain//manga-list/…".
        val doc = webClient.httpGet("https://$domain$listUrl/type-latest/ctg-all/state-all/page-1").parseHtml()
        return doc.select("div.panel_category a:not(.ctg_select)").mapNotNullToSet { a ->
            val key = a.attr("href").substringAfterLast("ctg-").substringBefore("/")
            val name = a.attr("title").replace("Category ", "")
            MangaTag(
                key = key,
                title = name,
                source = source,
            )
        }
    }

    override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
        val fullUrl = manga.url.toAbsoluteUrl(domain)
        val doc = webClient.httpGet(fullUrl).parseHtml()

        // Parse chapters in parallel while the rest of the page is scraped.
        val chaptersDeferred = async { getChapters(manga, doc) }

        val desc = doc.selectFirstOrThrow(selectDesc).html()

        val stateDiv = doc.select(selectState).text()

        val state = stateDiv.let {
            when (it) {
                in ongoing -> MangaState.ONGOING
                in finished -> MangaState.FINISHED
                else -> null
            }
        }

        val alt = doc.body().select(selectAlt).text().replace("Alternative : ", "")

        val aut = doc.body().select(selectAut).eachText().joinToString()

        manga.copy(
            tags = doc.body().select(selectTag).mapNotNullToSet { a ->
                MangaTag(
                    // On this site the tag id is the path segment after "page-".
                    key = a.attr("href").substringAfterLast("page-"),
                    title = a.text().toTitleCase(),
                    source = source,
                )
            },
            description = desc,
            altTitle = alt,
            author = aut,
            state = state,
            chapters = chaptersDeferred.await(),
            isNsfw = manga.isNsfw,
        )
    }
}
|
||||
@ -0,0 +1,19 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangabox.en
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangabox.MangaboxParser
|
||||
|
||||
|
||||
// Manganato: Mangabox-family source; only overrides the domains.
@MangaSourceParser("MANGANATO", "Manganato", "en")
internal class Manganato(context: MangaLoaderContext) :
    MangaboxParser(context, MangaSource.MANGANATO) {

    override val configKeyDomain = ConfigKey.Domain("chapmanganato.com", "manganato.com")

    // Fallback domain used by getPages when the primary domain serves no images.
    override val otherDomain = "chapmanganato.com"
}
|
||||
@ -0,0 +1,14 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.id
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
import java.util.Locale
|
||||
|
||||
// AlceaScan: MangaReader-family source; Indonesian site with English date strings.
@MangaSourceParser("ALCEASCAN", "AlceaScan", "id")
internal class AlceaScan(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.ALCEASCAN, "alceascan.my.id", pageSize = 20, searchPageSize = 10) {

    // Dates on the site are rendered in English despite the "id" content language.
    override val sourceLocale: Locale = Locale.ENGLISH
}
|
||||
@ -0,0 +1,15 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.id
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
import java.util.Locale
|
||||
|
||||
// Otsugami: MangaReader-family source; Indonesian site with English date strings.
@MangaSourceParser("OTSUGAMI", "Otsugami", "id")
internal class Otsugami(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.OTSUGAMI, "otsugami.id", pageSize = 40, searchPageSize = 10) {

    // Dates on the site are rendered in English despite the "id" content language.
    override val sourceLocale: Locale = Locale.ENGLISH
}
|
||||
@ -0,0 +1,10 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.it
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
|
||||
// Wit Comics: MangaReader-family source; uses the parent defaults unchanged.
@MangaSourceParser("WITCOMICS", "Wit Comics", "it")
internal class WitComics(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.WITCOMICS, "www.witcomics.net", pageSize = 5, searchPageSize = 10)
|
||||
@ -0,0 +1,21 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mmrcms.fr
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mmrcms.MmrcmsParser
|
||||
import java.util.Locale
|
||||
|
||||
|
||||
// Scan Manga VF: Mmrcms-family source with French labels in its selectors.
@MangaSourceParser("SCANMANGAVF_WS", "Scan Manga Vf Ws", "fr")
internal class ScanMangaVfWs(context: MangaLoaderContext) :
    MmrcmsParser(context, MangaSource.SCANMANGAVF_WS, "scanmanga-vf.ws") {

    // File extension appended to updated cover/page images on this site.
    override val imgUpdated = ".jpg"

    // "Appelé aussi" = "also known as" (alternative titles).
    override val selectTag = "dt:contains(Genres)"
    override val selectAlt = "dt:contains(Appelé aussi)"

    // Dates on the site are rendered in English despite the French UI.
    override val sourceLocale: Locale = Locale.ENGLISH
}
|
||||
@ -0,0 +1,300 @@
|
||||
package org.koitharu.kotatsu.parsers.site.wpcomics
|
||||
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
/**
 * Base parser for WP-Comics-style sites (NetTruyen clones and similar).
 * Subclasses typically override the URL paths, date pattern and CSS selectors.
 */
internal abstract class WpComicsParser(
    context: MangaLoaderContext,
    source: MangaSource,
    domain: String,
    pageSize: Int = 48,
) : PagedMangaParser(context, source, pageSize) {

    override val configKeyDomain = ConfigKey.Domain(domain)

    override val sortOrders: Set<SortOrder> = EnumSet.of(
        SortOrder.UPDATED,
        SortOrder.NEWEST,
        SortOrder.POPULARITY,
    )

    protected open val listUrl = "/the-loai"
    protected open val datePattern = "dd/MM/yy"

    init {
        // Both listing and search pagination on these sites are 1-based.
        paginator.firstPage = 1
        searchPaginator.firstPage = 1
    }

    // Status strings (Vietnamese + English) that map to MangaState.ONGOING.
    @JvmField
    protected val ongoing: Set<String> = setOf(
        "Đang tiến hành",
        "Ongoing",
    )

    // Status strings that map to MangaState.FINISHED.
    @JvmField
    protected val finished: Set<String> = setOf(
        "Hoàn thành",
        // FIX: was "Completed " — Jsoup's text() normalizes whitespace, so a
        // value with a trailing space could never match the scraped status.
        "Completed",
    )

    /**
     * Loads one page of the catalogue:
     * https://domain{listUrl}[/{tagKey}]?page=N[&keyword=…]&sort=…
     */
    override suspend fun getListPage(
        page: Int,
        query: String?,
        tags: Set<MangaTag>?,
        sortOrder: SortOrder,
    ): List<Manga> {
        val url = buildString {
            append("https://")
            append(domain)
            append(listUrl)

            if (!tags.isNullOrEmpty()) {
                // NOTE(review): multiple tag keys are concatenated with no separator —
                // presumably only single-tag filtering is supported; verify with callers.
                append("/")
                for (tag in tags) {
                    append(tag.key)
                }
            }

            append("?page=")
            append(page.toString())

            if (!query.isNullOrEmpty()) {
                append("&keyword=")
                append(query.urlEncoded())
            }

            // Numeric sort codes used by the site; empty string = default (latest update).
            append("&sort=")
            when (sortOrder) {
                SortOrder.POPULARITY -> append("10")
                SortOrder.UPDATED -> append("")
                SortOrder.NEWEST -> append("15")
                else -> append("")
            }
        }
        val doc = webClient.httpGet(url).parseHtml()

        return doc.select("div.item").map { div ->
            val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
            Manga(
                id = generateUid(href),
                url = href,
                publicUrl = href.toAbsoluteUrl(div.host ?: domain),
                coverUrl = div.selectFirst("img")?.src().orEmpty(),
                title = div.selectFirstOrThrow("h3").text().orEmpty(),
                altTitle = null,
                rating = RATING_UNKNOWN,
                tags = emptySet(),
                author = null,
                state = null,
                source = source,
                isNsfw = isNsfwSource,
            )
        }
    }

    /** Scrapes the genre list from the sidebar of the listing page. */
    override suspend fun getTags(): Set<MangaTag> {
        val doc = webClient.httpGet("https://$domain$listUrl").parseHtml()
        return doc.select("div.genres ul li:not(.active)").mapNotNullToSet { li ->
            val a = li.selectFirst("a") ?: return@mapNotNullToSet null
            val href = a.attr("href").removeSuffix('/').substringAfterLast('/')
            MangaTag(
                key = href,
                title = a.text(),
                source = source,
            )
        }
    }

    // CSS selectors for the details page; subclasses override per site.
    protected open val selectDesc = "div.detail-content p"
    protected open val selectState = "div.col-info li.status p:not(.name)"
    protected open val selectAut = "div.col-info li.author p:not(.name)"
    protected open val selectTag = "div.col-info li.kind p:not(.name) a"

    /**
     * Loads full details for [manga]: description, status, author, tags and the
     * chapter list (fetched concurrently with the detail scraping).
     */
    override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
        val fullUrl = manga.url.toAbsoluteUrl(domain)
        val doc = webClient.httpGet(fullUrl).parseHtml()

        val chaptersDeferred = async { getChapters(manga, doc) }

        val desc = doc.selectFirstOrThrow(selectDesc).html()

        val stateDiv = doc.selectFirst(selectState)

        val state = stateDiv?.let {
            when (it.text()) {
                in ongoing -> MangaState.ONGOING
                in finished -> MangaState.FINISHED
                else -> null
            }
        }

        val aut = doc.body().select(selectAut).text()

        manga.copy(
            tags = doc.body().select(selectTag).mapNotNullToSet { a ->
                MangaTag(
                    key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
                    title = a.text().toTitleCase(),
                    source = source,
                )
            },
            description = desc,
            altTitle = null,
            author = aut,
            state = state,
            chapters = chaptersDeferred.await(),
        )
    }

    // Selectors for chapter rows and their date column; subclasses override per site.
    protected open val selectDate = "div.col-xs-4"
    protected open val selectChapter = "div#nt_listchapter li:not(.heading)"

    /**
     * Parses the chapter list from an already-loaded details page.
     * Recent chapters show "HH:mm dd/MM" (no year); older ones use [datePattern].
     */
    protected open suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
        return doc.body().select(selectChapter).mapChapters(reversed = true) { i, li ->
            val a = li.selectFirstOrThrow("a")
            val href = a.attrAsRelativeUrl("href")
            val dateText = li.selectFirst(selectDate)?.text()

            // A ':' in the date means the hour-minute form is in use.
            val dateFormat = if (dateText?.contains(":") == true) {
                SimpleDateFormat("HH:mm dd/MM", sourceLocale)
            } else {
                SimpleDateFormat(datePattern, sourceLocale)
            }

            MangaChapter(
                id = generateUid(href),
                name = a.text(),
                number = i + 1,
                url = href,
                uploadDate = parseChapterDate(
                    dateFormat,
                    dateText,
                ),
                source = source,
                scanlator = null,
                branch = null,
            )
        }
    }

    // Selector for page images in the reader.
    protected open val selectPage = "div.reading-detail img"

    /** Loads the page images of [chapter]. */
    override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
        val fullUrl = chapter.url.toAbsoluteUrl(domain)
        val doc = webClient.httpGet(fullUrl).parseHtml()

        return doc.select(selectPage).map { img ->
            val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
            MangaPage(
                id = generateUid(url),
                url = url,
                preview = null,
                source = source,
            )
        }
    }

    /**
     * Parses a chapter date string, handling relative dates (English "… ago",
     * Vietnamese "… trước"), "yesterday"/"today", ordinal suffixes
     * ("5th December 2019") and plain [dateFormat] dates. Returns 0 on failure.
     */
    protected fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
        val d = date?.lowercase() ?: return 0
        return when {
            d.endsWith(" ago") ||
                d.endsWith(" trước") // translated 'ago' in Vietnamese
            -> parseRelativeDate(date)

            // 'yesterday', resolved to midnight of the previous day.
            // FIX: was startsWith("year"), which can never match "yesterday".
            d.startsWith("yester") -> Calendar.getInstance().apply {
                add(Calendar.DAY_OF_MONTH, -1) // yesterday
                set(Calendar.HOUR_OF_DAY, 0)
                set(Calendar.MINUTE, 0)
                set(Calendar.SECOND, 0)
                set(Calendar.MILLISECOND, 0)
            }.timeInMillis

            d.startsWith("today") -> Calendar.getInstance().apply {
                set(Calendar.HOUR_OF_DAY, 0)
                set(Calendar.MINUTE, 0)
                set(Calendar.SECOND, 0)
                set(Calendar.MILLISECOND, 0)
            }.timeInMillis

            // Strip ordinal suffixes (e.g. "5th December 2019" -> "5 December 2019").
            date.contains(Regex("""\d(st|nd|rd|th)""")) -> date.split(" ").map {
                if (it.contains(Regex("""\d\D\D"""))) {
                    it.replace(Regex("""\D"""), "")
                } else {
                    it
                }
            }.let { dateFormat.tryParse(it.joinToString(" ")) }

            else -> dateFormat.tryParse(date)
        }
    }

    /**
     * Converts a relative date such as "21 hours ago" / "2 ngày trước" into
     * epoch milliseconds. Returns 0 when nothing recognizable is found.
     */
    private fun parseRelativeDate(date: String): Long {
        val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
        val cal = Calendar.getInstance()

        return when {
            WordSet(
                "day",
                "days",
                "d",
                // NOTE(review): "ngày " carries a trailing space — confirm WordSet
                // matches it; if anyWordIn splits on whitespace this entry is dead.
                "ngày ",
            ).anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis

            WordSet("jam", "saat", "heure", "hora", "horas", "hour", "hours", "h").anyWordIn(date) -> cal.apply {
                add(
                    Calendar.HOUR,
                    -number,
                )
            }.timeInMillis

            WordSet(
                "min",
                "minute",
                "minutes",
                "mins",
                "phút",
            ).anyWordIn(date) -> cal.apply {
                add(
                    Calendar.MINUTE,
                    -number,
                )
            }.timeInMillis

            WordSet("second").anyWordIn(date) -> cal.apply {
                add(
                    Calendar.SECOND,
                    -number,
                )
            }.timeInMillis

            WordSet("month", "months").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
            WordSet("year").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
            else -> 0
        }
    }
}
|
||||
@ -0,0 +1,102 @@
|
||||
package org.koitharu.kotatsu.parsers.site.wpcomics.en
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.site.wpcomics.WpComicsParser
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.util.EnumSet
|
||||
|
||||
/**
 * Xoxo Comics: WP-Comics-family source for western comics with path-based
 * sorting (popular/newest/alphabet) instead of the parent's numeric sort codes.
 */
// FIX: language was declared "vi", but the package is .en and the site serves
// English comics — declare it as "en".
@MangaSourceParser("XOXOCOMICS", "Xoxo Comics", "en", ContentType.COMICS)
internal class XoxoComics(context: MangaLoaderContext) :
    WpComicsParser(context, MangaSource.XOXOCOMICS, "xoxocomics.net", 50) {

    override val listUrl = "/genre"
    override val datePattern = "MM/dd/yyyy"

    override val sortOrders: Set<SortOrder> = EnumSet.of(
        SortOrder.UPDATED,
        SortOrder.NEWEST,
        SortOrder.POPULARITY,
        SortOrder.ALPHABETICAL,
    )

    /**
     * Catalogue URL layout:
     *  - search: https://domain/search?keyword=…&page=N
     *  - browse: https://domain{listUrl}[/{tagKey}]/{sort}?page=N
     */
    override suspend fun getListPage(
        page: Int,
        query: String?,
        tags: Set<MangaTag>?,
        sortOrder: SortOrder,
    ): List<Manga> {
        val url = buildString {
            append("https://")
            append(domain)

            if (!query.isNullOrEmpty()) {
                append("/search?keyword=")
                append(query.urlEncoded())
                append("&page=")
                append(page.toString())
            } else {
                append(listUrl)
                if (!tags.isNullOrEmpty()) {
                    append("/")
                    for (tag in tags) {
                        append(tag.key)
                    }
                }

                // Path-based sort segment; empty = latest updates.
                append("/")
                when (sortOrder) {
                    SortOrder.POPULARITY -> append("popular")
                    SortOrder.UPDATED -> append("")
                    SortOrder.NEWEST -> append("newest")
                    SortOrder.ALPHABETICAL -> append("alphabet")
                    else -> append("")
                }

                append("?page=")
                append(page.toString())
            }
        }
        val doc = webClient.httpGet(url).parseHtml()

        return doc.select("div.item").map { div ->
            val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
            Manga(
                id = generateUid(href),
                url = href,
                publicUrl = href.toAbsoluteUrl(div.host ?: domain),
                coverUrl = div.selectFirst("img")?.src().orEmpty(),
                title = div.selectFirstOrThrow("h3").text().orEmpty(),
                altTitle = null,
                rating = RATING_UNKNOWN,
                tags = emptySet(),
                author = null,
                state = null,
                source = source,
                isNsfw = isNsfwSource,
            )
        }
    }

    /** Loads pages; "/all" asks the reader for the whole chapter on one page. */
    override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
        val fullUrl = chapter.url.toAbsoluteUrl(domain) + "/all"
        val doc = webClient.httpGet(fullUrl).parseHtml()

        return doc.select(selectPage).map { img ->
            val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
            MangaPage(
                id = generateUid(url),
                url = url,
                preview = null,
                source = source,
            )
        }
    }
}
|
||||
@ -0,0 +1,15 @@
|
||||
package org.koitharu.kotatsu.parsers.site.wpcomics.vi
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.wpcomics.WpComicsParser
|
||||
|
||||
|
||||
// Nettruyenmax: WP-Comics-family source; only overrides the listing path.
@MangaSourceParser("NETTRUYENMAX", "Nettruyenmax", "vi")
internal class Nettruyenmax(context: MangaLoaderContext) :
    WpComicsParser(context, MangaSource.NETTRUYENMAX, "www.nettruyenmax.com", 35) {

    // "tim-truyen" = "find comics" — this site's catalogue path.
    override val listUrl = "/tim-truyen"
}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.wpcomics.vi
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.wpcomics.WpComicsParser
|
||||
|
||||
|
||||
// Nhattruyenmin: WP-Comics-family source; uses the parent defaults unchanged.
@MangaSourceParser("NHATTRUYENMIN", "Nhattruyenmin", "vi")
internal class Nhattruyenmin(context: MangaLoaderContext) :
    WpComicsParser(context, MangaSource.NHATTRUYENMIN, "nhattruyenmin.com")
|
||||
@ -0,0 +1,302 @@
|
||||
package org.koitharu.kotatsu.parsers.site.zmanga
|
||||
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
internal abstract class ZMangaParser(
|
||||
context: MangaLoaderContext,
|
||||
source: MangaSource,
|
||||
domain: String,
|
||||
pageSize: Int = 16,
|
||||
) : PagedMangaParser(context, source, pageSize) {
|
||||
|
||||
    override val configKeyDomain = ConfigKey.Domain(domain)

    override val sortOrders: Set<SortOrder> = EnumSet.of(
        SortOrder.UPDATED,
        SortOrder.POPULARITY,
        SortOrder.ALPHABETICAL,
        SortOrder.NEWEST,
        SortOrder.RATING,
    )

    // Catalogue path; note it has NO leading slash — callers prepend "/" themselves.
    protected open val listUrl = "advanced-search/"
    // Date format used by chapter lists, e.g. "March 5, 2023".
    protected open val datePattern = "MMMM d, yyyy"

    init {
        // Both listing and search pagination on these sites are 1-based.
        paginator.firstPage = 1
        searchPaginator.firstPage = 1
    }

    // Status strings (as shown on the site) that map to MangaState.ONGOING.
    @JvmField
    protected val ongoing: Set<String> = setOf(
        "On Going",
        "Ongoing",
    )

    // Status strings (as shown on the site) that map to MangaState.FINISHED.
    @JvmField
    protected val finished: Set<String> = setOf(
        "Completed",
    )
|
||||
|
||||
override suspend fun getListPage(
|
||||
page: Int,
|
||||
query: String?,
|
||||
tags: Set<MangaTag>?,
|
||||
sortOrder: SortOrder,
|
||||
): List<Manga> {
|
||||
val url = buildString {
|
||||
append("https://")
|
||||
append(domain)
|
||||
append("/$listUrl")
|
||||
if(page > 1){
|
||||
append("page/")
|
||||
append(page.toString())
|
||||
append("/")
|
||||
}
|
||||
|
||||
append("?order=")
|
||||
when (sortOrder) {
|
||||
SortOrder.POPULARITY -> append("popular")
|
||||
SortOrder.UPDATED -> append("update")
|
||||
SortOrder.ALPHABETICAL -> append("title")
|
||||
SortOrder.NEWEST -> append("latest")
|
||||
SortOrder.RATING -> append("rating")
|
||||
}
|
||||
if (!query.isNullOrEmpty()) {
|
||||
append("&title=")
|
||||
append(query.urlEncoded())
|
||||
}
|
||||
|
||||
if (!tags.isNullOrEmpty()) {
|
||||
for (tag in tags) {
|
||||
append("&")
|
||||
append("genre[]".urlEncoded())
|
||||
append("=")
|
||||
append(tag.key)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
val doc = webClient.httpGet(url).parseHtml()
|
||||
|
||||
return doc.select("div.flexbox2-item").map { div ->
|
||||
val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
|
||||
Manga(
|
||||
id = generateUid(href),
|
||||
url = href,
|
||||
publicUrl = href.toAbsoluteUrl(div.host ?: domain),
|
||||
coverUrl = div.selectFirst("img")?.src().orEmpty(),
|
||||
title = div.selectFirstOrThrow("div.flexbox2-title span:not(.studio)").text().orEmpty(),
|
||||
altTitle = null,
|
||||
rating = div.selectFirstOrThrow("div.info div.score").ownText().toFloatOrNull()?.div(10f)
|
||||
?: RATING_UNKNOWN,
|
||||
tags = doc.body().select("div.genres a").mapNotNullToSet { span ->
|
||||
MangaTag(
|
||||
key = span.attr("class"),
|
||||
title = span.text().toTitleCase(),
|
||||
source = source,
|
||||
)
|
||||
},
|
||||
author = null,
|
||||
state = null,
|
||||
source = source,
|
||||
isNsfw = isNsfwSource,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
override suspend fun getTags(): Set<MangaTag> {
|
||||
val doc = webClient.httpGet("https://$domain/$listUrl").parseHtml()
|
||||
return doc.select("tr.gnrx div.custom-control").mapNotNullToSet { checkbox ->
|
||||
val key = checkbox.selectFirstOrThrow("input").attr("value") ?: return@mapNotNullToSet null
|
||||
val name = checkbox.selectFirstOrThrow("label").text()
|
||||
MangaTag(
|
||||
key = key,
|
||||
title = name,
|
||||
source = source,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
protected open val selectDesc = "div.series-synops"
|
||||
protected open val selectState = "span.status"
|
||||
protected open val selectAlt = "div.series-infolist li:contains(Alt) span"
|
||||
protected open val selectAut = "div.series-infolist li:contains(Author) span"
|
||||
protected open val selectTag = "div.series-genres a"
|
||||
|
||||
override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
|
||||
val fullUrl = manga.url.toAbsoluteUrl(domain)
|
||||
val doc = webClient.httpGet(fullUrl).parseHtml()
|
||||
|
||||
val chaptersDeferred = async { getChapters(manga, doc) }
|
||||
|
||||
val desc = doc.selectFirstOrThrow(selectDesc).html()
|
||||
|
||||
val stateDiv = doc.selectFirst(selectState)
|
||||
|
||||
val state = stateDiv?.let {
|
||||
when (it.text()) {
|
||||
in ongoing -> MangaState.ONGOING
|
||||
in finished -> MangaState.FINISHED
|
||||
else -> null
|
||||
}
|
||||
}
|
||||
|
||||
val alt = doc.body().select(selectAlt).text()
|
||||
|
||||
val aut = doc.body().select(selectAut).text()
|
||||
|
||||
manga.copy(
|
||||
tags = doc.body().select(selectTag).mapNotNullToSet { a ->
|
||||
MangaTag(
|
||||
key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
|
||||
title = a.text().toTitleCase().replace(",", ""),
|
||||
source = source,
|
||||
)
|
||||
},
|
||||
description = desc,
|
||||
altTitle = alt,
|
||||
author = aut,
|
||||
state = state,
|
||||
chapters = chaptersDeferred.await(),
|
||||
isNsfw = manga.isNsfw || doc.getElementById("adt-warning") != null,
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
protected open val selectDate = "span.date"
|
||||
protected open val selectChapter = "ul.series-chapterlist li"
|
||||
|
||||
protected open suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
|
||||
val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
|
||||
return doc.body().select(selectChapter).mapChapters(reversed = true) { i, li ->
|
||||
val a = li.selectFirstOrThrow("a")
|
||||
val href = a.attrAsRelativeUrl("href")
|
||||
val dateText = li.selectFirst(selectDate)?.text()
|
||||
MangaChapter(
|
||||
id = generateUid(href),
|
||||
name = li.selectFirstOrThrow(".flexch-infoz span:not(.date)").text(),
|
||||
number = i + 1,
|
||||
url = href,
|
||||
uploadDate = parseChapterDate(
|
||||
dateFormat,
|
||||
dateText,
|
||||
),
|
||||
source = source,
|
||||
scanlator = null,
|
||||
branch = null,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
protected open val selectPage = "div.reader-area img"
|
||||
|
||||
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
|
||||
val fullUrl = chapter.url.toAbsoluteUrl(domain)
|
||||
val doc = webClient.httpGet(fullUrl).parseHtml()
|
||||
|
||||
return doc.select(selectPage).map { img ->
|
||||
val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
|
||||
MangaPage(
|
||||
id = generateUid(url),
|
||||
url = url,
|
||||
preview = null,
|
||||
source = source,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
protected fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
|
||||
// Clean date (e.g. 5th December 2019 to 5 December 2019) before parsing it
|
||||
val d = date?.lowercase() ?: return 0
|
||||
return when {
|
||||
d.endsWith(" ago") ||
|
||||
// short Hours
|
||||
d.endsWith(" h") ||
|
||||
// short Day
|
||||
d.endsWith(" d") -> parseRelativeDate(date)
|
||||
|
||||
// Handle 'yesterday' and 'today', using midnight
|
||||
d.startsWith("year") -> Calendar.getInstance().apply {
|
||||
add(Calendar.DAY_OF_MONTH, -1) // yesterday
|
||||
set(Calendar.HOUR_OF_DAY, 0)
|
||||
set(Calendar.MINUTE, 0)
|
||||
set(Calendar.SECOND, 0)
|
||||
set(Calendar.MILLISECOND, 0)
|
||||
}.timeInMillis
|
||||
|
||||
d.startsWith("today") -> Calendar.getInstance().apply {
|
||||
set(Calendar.HOUR_OF_DAY, 0)
|
||||
set(Calendar.MINUTE, 0)
|
||||
set(Calendar.SECOND, 0)
|
||||
set(Calendar.MILLISECOND, 0)
|
||||
}.timeInMillis
|
||||
|
||||
date.contains(Regex("""\d(st|nd|rd|th)""")) -> date.split(" ").map {
|
||||
if (it.contains(Regex("""\d\D\D"""))) {
|
||||
it.replace(Regex("""\D"""), "")
|
||||
} else {
|
||||
it
|
||||
}
|
||||
}.let { dateFormat.tryParse(it.joinToString(" ")) }
|
||||
|
||||
else -> dateFormat.tryParse(date)
|
||||
}
|
||||
}
|
||||
|
||||
// Parses dates in this form:
|
||||
// 21 hours ago
|
||||
private fun parseRelativeDate(date: String): Long {
|
||||
val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
|
||||
val cal = Calendar.getInstance()
|
||||
|
||||
return when {
|
||||
WordSet(
|
||||
"day",
|
||||
"days",
|
||||
).anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis
|
||||
|
||||
WordSet("hour", "hours", "h").anyWordIn(date) -> cal.apply {
|
||||
add(
|
||||
Calendar.HOUR,
|
||||
-number,
|
||||
)
|
||||
}.timeInMillis
|
||||
|
||||
WordSet(
|
||||
"min",
|
||||
"minute",
|
||||
"minutes",
|
||||
).anyWordIn(date) -> cal.apply {
|
||||
add(
|
||||
Calendar.MINUTE,
|
||||
-number,
|
||||
)
|
||||
}.timeInMillis
|
||||
|
||||
WordSet("second").anyWordIn(date) -> cal.apply {
|
||||
add(
|
||||
Calendar.SECOND,
|
||||
-number,
|
||||
)
|
||||
}.timeInMillis
|
||||
|
||||
WordSet("month", "months").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
|
||||
WordSet("year").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
|
||||
else -> 0
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,17 @@
|
||||
package org.koitharu.kotatsu.parsers.site.zmanga.id
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.zmanga.ZMangaParser
|
||||
import java.util.Locale
|
||||
|
||||
|
||||
/** Indonesian adult source "Hensekai", on the stock ZManga theme. */
@MangaSourceParser("HENSEKAI", "Hensekai", "id", ContentType.HENTAI)
internal class Hensekai(context: MangaLoaderContext) :
	ZMangaParser(context, MangaSource.HENSEKAI, "hensekai.com") {

	// Forces English date parsing — presumably the site prints English
	// month names despite its "id" language; confirm against the site.
	override val sourceLocale: Locale = Locale.ENGLISH
}
|
||||
@ -0,0 +1,17 @@
|
||||
package org.koitharu.kotatsu.parsers.site.zmanga.id
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.zmanga.ZMangaParser
|
||||
|
||||
|
||||
/** Indonesian adult source "KomikIndo Info", on the stock ZManga theme. */
@MangaSourceParser("KOMIKINDO_INFO", "KomikIndo Info", "id", ContentType.HENTAI)
internal class KomikIndoInfo(context: MangaLoaderContext) :
	ZMangaParser(context, MangaSource.KOMIKINDO_INFO, "komikindo.info") {

	// Site-specific date format with short month names, e.g. "05 Jan 2023".
	override val datePattern = "dd MMM yyyy"

}
|
||||
@ -0,0 +1,44 @@
|
||||
package org.koitharu.kotatsu.parsers.site.zmanga.id
|
||||
|
||||
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.Manga
|
||||
import org.koitharu.kotatsu.parsers.model.MangaChapter
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.zmanga.ZMangaParser
|
||||
import org.koitharu.kotatsu.parsers.util.attrAsRelativeUrl
|
||||
import org.koitharu.kotatsu.parsers.util.generateUid
|
||||
import org.koitharu.kotatsu.parsers.util.mapChapters
|
||||
import org.koitharu.kotatsu.parsers.util.selectFirstOrThrow
|
||||
import java.text.SimpleDateFormat
|
||||
|
||||
// Info: Some scans are password-protected
@MangaSourceParser("MAID_ID", "Maid Id", "id")
internal class MaidId(context: MangaLoaderContext) :
	ZMangaParser(context, MangaSource.MAID_ID, "www.maid.my.id") {

	/**
	 * Replaces the base chapter parsing: the visible name is rebuilt as
	 * "Chapter <n>" from the number embedded in each row's heading.
	 */
	override suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		val format = SimpleDateFormat(datePattern, sourceLocale)
		val rows = doc.body().select(selectChapter)
		return rows.mapChapters(reversed = true) { index, row ->
			val link = row.selectFirstOrThrow("a")
			val chapterUrl = link.attrAsRelativeUrl("href")
			// The heading HTML looks like "Chapter <n><span ...>"; cut out <n>.
			val heading = row.selectFirstOrThrow(".flexch-infoz span").html()
			val chapterNumber = heading.substringAfterLast("Chapter ").substringBefore("<span")
			val uploaded = parseChapterDate(format, row.selectFirst(selectDate)?.text())
			MangaChapter(
				id = generateUid(chapterUrl),
				name = "Chapter $chapterNumber",
				number = index + 1,
				url = chapterUrl,
				uploadDate = uploaded,
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}
}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.zmanga.id
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.zmanga.ZMangaParser
|
||||
|
||||
|
||||
/** Indonesian source "Neu Manga": uses the stock ZManga theme parser unchanged. */
@MangaSourceParser("NEU_MANGA", "Neu Manga", "id")
internal class NeuManga(context: MangaLoaderContext) :
	ZMangaParser(context, MangaSource.NEU_MANGA, "neumanga.net")
|
||||
@ -0,0 +1,44 @@
|
||||
package org.koitharu.kotatsu.parsers.site.zmanga.id
|
||||
|
||||
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.Manga
|
||||
import org.koitharu.kotatsu.parsers.model.MangaChapter
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.zmanga.ZMangaParser
|
||||
import org.koitharu.kotatsu.parsers.util.attrAsRelativeUrl
|
||||
import org.koitharu.kotatsu.parsers.util.generateUid
|
||||
import org.koitharu.kotatsu.parsers.util.mapChapters
|
||||
import org.koitharu.kotatsu.parsers.util.selectFirstOrThrow
|
||||
import java.text.SimpleDateFormat
|
||||
|
||||
@MangaSourceParser("SHIRO_DOUJIN", "Shiro Doujin", "id", ContentType.HENTAI)
internal class ShiroDoujin(context: MangaLoaderContext) :
	ZMangaParser(context, MangaSource.SHIRO_DOUJIN, "shirodoujin.com") {

	/**
	 * Overrides the base chapter parsing so that each chapter is named
	 * "Chapter <n>", with <n> taken from the row's heading markup.
	 */
	override suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		val dateParser = SimpleDateFormat(datePattern, sourceLocale)
		return doc.body().select(selectChapter).mapChapters(reversed = true) { idx, item ->
			val anchor = item.selectFirstOrThrow("a")
			val relUrl = anchor.attrAsRelativeUrl("href")
			// Extract the chapter number between "Chapter " and the trailing <span>.
			val rawNumber = item.selectFirstOrThrow(".flexch-infoz span")
				.html()
				.substringAfterLast("Chapter ")
				.substringBefore("<span")
			MangaChapter(
				id = generateUid(relUrl),
				name = "Chapter $rawNumber",
				number = idx + 1,
				url = relUrl,
				uploadDate = parseChapterDate(dateParser, item.selectFirst(selectDate)?.text()),
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}
}
|
||||
Loading…
Reference in New Issue