[fmreader] add SearchWithFilters
[LegacyScans] add ContentTypes, SortOrder.UPDATED; [MangaMana] add SortOrder.RATING_ASC, SortOrder.NEWEST_ASC; [Manhwa18.com] move to /en; add new option on .toAbsoluteUrl()
parent
cc62981f12
commit
2061a971b8
@ -0,0 +1,245 @@
|
|||||||
|
package org.koitharu.kotatsu.parsers.site.en
|
||||||
|
|
||||||
|
import androidx.collection.ArrayMap
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||||
|
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||||
|
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||||
|
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||||
|
import org.koitharu.kotatsu.parsers.model.*
|
||||||
|
import org.koitharu.kotatsu.parsers.util.*
|
||||||
|
import java.util.*
|
||||||
|
|
||||||
|
@MangaSourceParser("MANHWA18COM", "Manhwa18.com", "en", type = ContentType.HENTAI)
internal class Manhwa18Com(context: MangaLoaderContext) :
	PagedMangaParser(context, MangaParserSource.MANHWA18COM, pageSize = 18, searchPageSize = 18) {

	override val configKeyDomain: ConfigKey.Domain = ConfigKey.Domain("manhwa18.com")

	override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
		super.onCreateConfig(keys)
		keys.add(userAgentKey)
	}

	override val availableSortOrders: Set<SortOrder>
		get() = EnumSet.of(
			SortOrder.UPDATED,
			SortOrder.POPULARITY,
			SortOrder.ALPHABETICAL,
			SortOrder.NEWEST,
			SortOrder.RATING,
		)

	override val filterCapabilities: MangaListFilterCapabilities
		get() = MangaListFilterCapabilities(
			isMultipleTagsSupported = true,
			isTagsExclusionSupported = true,
			isSearchSupported = true,
			isSearchWithFiltersSupported = true,
		)

	override suspend fun getFilterOptions() = MangaListFilterOptions(
		availableTags = tagsMap.get().values.toSet(),
		availableStates = EnumSet.of(
			MangaState.ONGOING,
			MangaState.FINISHED,
			MangaState.PAUSED,
		),
	)

	override suspend fun getFavicons(): Favicons {
		return Favicons(
			listOf(
				Favicon("https://$domain/uploads/logos/logo-mini.png", 92, null),
			),
			domain,
		)
	}

	/**
	 * Loads one page of the catalogue from `/tim-kiem`, applying the search query,
	 * tag include/exclude filters, sort order and publication status as query parameters.
	 */
	override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
		val url = buildString {
			append("https://")
			append(domain)
			append("/tim-kiem?page=")
			append(page)

			filter.query?.let {
				append("&q=")
				append(filter.query.urlEncoded())
			}

			// Tag keys are numeric genre ids collected by parseTags()
			append("&accept_genres=")
			if (filter.tags.isNotEmpty()) {
				append(filter.tags.joinToString(",") { it.key })
			}

			append("&reject_genres=")
			if (filter.tagsExclude.isNotEmpty()) {
				append(filter.tagsExclude.joinToString(",") { it.key })
			}

			append("&sort=")
			append(
				when (order) {
					SortOrder.ALPHABETICAL -> "az"
					SortOrder.ALPHABETICAL_DESC -> "za"
					SortOrder.POPULARITY -> "top"
					SortOrder.UPDATED -> "update"
					SortOrder.NEWEST -> "new"
					SortOrder.RATING -> "like"
					else -> "update" // fall back to the site default
				},
			)

			filter.states.oneOrThrowIfMany()?.let {
				append("&status=")
				append(
					when (it) {
						MangaState.ONGOING -> "1"
						MangaState.FINISHED -> "3"
						MangaState.PAUSED -> "2"
						else -> ""
					},
				)
			}

			// TODO(review): the site also supports an &artist= parameter; author
			// filtering could be added here once MangaListFilter exposes it reliably.
		}

		val docs = webClient.httpGet(url).parseHtml()

		return docs.select(".card-body .thumb-item-flow")
			.map {
				val titleElement = it.selectFirstOrThrow(".thumb_attr.series-title > a")
				val absUrl = titleElement.attrAsAbsoluteUrl("href")
				Manga(
					id = generateUid(absUrl.toRelativeUrl(domain)),
					title = titleElement.text(),
					altTitle = null,
					url = absUrl.toRelativeUrl(domain),
					publicUrl = absUrl,
					rating = RATING_UNKNOWN,
					isNsfw = true,
					coverUrl = it.selectFirst("div.img-in-ratio")?.attrAsAbsoluteUrl("data-bg").orEmpty(),
					tags = emptySet(),
					state = null,
					author = null,
					largeCoverUrl = null,
					description = null,
					// Use this parser's own source (MANHWA18COM); the previous
					// hard-coded MangaParserSource.MANHWA18 mislabeled every item.
					source = source,
				)
			}
	}

	/**
	 * Fetches the details page and fills in alt title, author, description,
	 * tags, publication state and the chapter list.
	 */
	override suspend fun getDetails(manga: Manga): Manga {
		val docs = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
		val cardInfoElement = docs.selectFirst("div.series-information")
		val author = cardInfoElement?.selectFirst(".info-name:contains(Author)")?.parent()
			?.select("a")
			?.joinToString(", ") { it.text() }
		val availableTags = tagsMap.get()
		// Genre names are matched case-insensitively against the cached tag map
		val tags = cardInfoElement?.selectFirst(".info-name:contains(Genre)")?.parent()
			?.select("a")
			?.mapNotNullToSet { availableTags[it.text().lowercase(Locale.ENGLISH)] }
		val state = cardInfoElement?.selectFirst(".info-name:contains(Status)")?.parent()
			?.selectFirst("a")
			?.let {
				when (it.text().lowercase()) {
					"on going" -> MangaState.ONGOING
					"completed" -> MangaState.FINISHED
					"on hold" -> MangaState.PAUSED
					else -> null
				}
			}

		return manga.copy(
			altTitle = cardInfoElement?.selectFirst("b:contains(Other names)")?.parent()?.ownText()?.removePrefix(": "),
			author = author,
			description = docs.selectFirst(".series-summary .summary-content")?.html(),
			tags = tags.orEmpty(),
			state = state,
			chapters = docs.select(".card-body > .list-chapters > a").mapChapters(reversed = true) { index, element ->
				val chapterUrl = element.attrAsAbsoluteUrlOrNull("href")?.toRelativeUrl(domain)
					?: return@mapChapters null
				val uploadDate = parseUploadDate(element.selectFirst(".chapter-time")?.text())
				MangaChapter(
					id = generateUid(chapterUrl),
					name = element.selectFirst(".chapter-name")?.text().orEmpty(),
					number = index + 1f,
					volume = 0,
					url = chapterUrl,
					scanlator = null,
					uploadDate = uploadDate,
					branch = null,
					// Must match this parser's source, not MANHWA18
					source = source,
				)
			},
		)
	}

	/**
	 * Parses a relative timestamp like "3 days ago" into epoch millis.
	 * Returns 0 when the string is absent or not in the expected "<n> <unit> ago" form.
	 */
	private fun parseUploadDate(timeStr: String?): Long {
		timeStr ?: return 0
		val timeWords = timeStr.split(' ')
		if (timeWords.size != 3) return 0
		val timeWord = timeWords[1]
		val timeAmount = timeWords[0].toIntOrNull() ?: return 0
		val timeUnit = when (timeWord) {
			"minute", "minutes" -> Calendar.MINUTE
			"hour", "hours" -> Calendar.HOUR
			"day", "days" -> Calendar.DAY_OF_YEAR
			"week", "weeks" -> Calendar.WEEK_OF_YEAR
			"month", "months" -> Calendar.MONTH
			"year", "years" -> Calendar.YEAR
			else -> return 0
		}
		val cal = Calendar.getInstance()
		cal.add(timeUnit, -timeAmount)
		return cal.time.time
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val chapterUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(chapterUrl).parseHtml()
		return doc.requireElementById("chapter-content").select("img").mapNotNull {
			// Lazy-loaded images keep the real URL in data-src; fall back to src
			val url = it.attrAsRelativeUrlOrNull("data-src")
				?: it.attrAsRelativeUrlOrNull("src")
				?: return@mapNotNull null
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				// Must match this parser's source, not MANHWA18
				source = source,
			)
		}
	}

	// Lazily fetched genre map: lowercase genre name -> MangaTag (key = site genre id)
	private val tagsMap = SuspendLazy(::parseTags)

	private suspend fun parseTags(): Map<String, MangaTag> {
		val doc = webClient.httpGet("https://$domain/tim-kiem?q=").parseHtml()
		val list = doc.getElementsByAttribute("data-genre-id")
		if (list.isEmpty()) {
			return emptyMap()
		}
		val result = ArrayMap<String, MangaTag>(list.size)
		for (item in list) {
			val id = item.attr("data-genre-id")
			val name = item.text()
			result[name.lowercase(Locale.ENGLISH)] = MangaTag(
				title = name.toTitleCase(Locale.ENGLISH),
				key = id,
				source = source,
			)
		}
		return result
	}
}
|
||||||
@ -1,140 +0,0 @@
|
|||||||
package org.koitharu.kotatsu.parsers.site.fmreader.en
|
|
||||||
|
|
||||||
import kotlinx.coroutines.async
|
|
||||||
import kotlinx.coroutines.coroutineScope
|
|
||||||
import org.jsoup.nodes.Document
|
|
||||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
|
||||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
|
||||||
import org.koitharu.kotatsu.parsers.model.*
|
|
||||||
import org.koitharu.kotatsu.parsers.site.fmreader.FmreaderParser
|
|
||||||
import org.koitharu.kotatsu.parsers.util.*
|
|
||||||
import java.text.SimpleDateFormat
|
|
||||||
|
|
||||||
@MangaSourceParser("MANHWA18COM", "Manhwa18.com", "en", ContentType.HENTAI)
internal class Manhwa18Com(context: MangaLoaderContext) :
	FmreaderParser(context, MangaParserSource.MANHWA18COM, "manhwa18.com") {

	override val listUrl = "/tim-kiem"
	override val selectState = "div.info-item:contains(Status) span.info-value "
	override val selectAlt = "div.info-item:contains(Other name) span.info-value "
	override val selectTag = "div.info-item:contains(Genre) span.info-value a"
	override val datePattern = "dd/MM/yyyy"
	override val selectPage = "div#chapter-content img"
	override val selectBodyTag = "div.advanced-wrapper .genre_label"

	/**
	 * Loads one catalogue page from `/tim-kiem`. A text query and the
	 * tag/sort/status filters are mutually exclusive on this site, hence the `when`.
	 */
	override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
		val url = buildString {
			append("https://")
			append(domain)
			append("/tim-kiem?page=")
			append(page)

			when {
				!filter.query.isNullOrEmpty() -> {
					append("&q=")
					append(filter.query.urlEncoded())
				}

				else -> {

					append("&accept_genres=")
					append(filter.tags.joinToString(",") { it.key })

					append("&reject_genres=")
					append(filter.tagsExclude.joinToString(",") { it.key })

					append("&sort=")
					append(
						when (order) {
							SortOrder.ALPHABETICAL -> "az"
							SortOrder.ALPHABETICAL_DESC -> "za"
							SortOrder.POPULARITY -> "top"
							SortOrder.UPDATED -> "update"
							SortOrder.NEWEST -> "new"
							SortOrder.RATING -> "like"
							// BUGFIX: was `else -> null`, which made
							// StringBuilder.append write the literal string
							// "null" into the URL (&sort=null).
							else -> "update"
						},
					)

					filter.states.oneOrThrowIfMany()?.let {
						append("&status=")
						append(
							when (it) {
								MangaState.ONGOING -> "1"
								MangaState.FINISHED -> "3"
								MangaState.PAUSED -> "2"
								else -> ""
							},
						)
					}
				}
			}
		}
		return parseMangaList(webClient.httpGet(url).parseHtml())
	}

	/**
	 * Collects the genre checkboxes from the advanced-search widget.
	 * The tag key is the site's numeric genre id (`data-genre-id`).
	 */
	override suspend fun fetchAvailableTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain/$listUrl").parseHtml()
		return doc.select(selectBodyTag).mapNotNullToSet { label ->
			val key = label.attr("data-genre-id")
			MangaTag(
				key = key,
				// NOTE(review): ".gerne-name" looks misspelled but matches the
				// site's actual CSS class — do not "fix" it.
				title = label.selectFirstOrThrow(".gerne-name").text(),
				source = source,
			)
		}
	}

	/**
	 * Loads the details page, fetching the chapter list concurrently with
	 * parsing the description, tags, state, alt title and author.
	 */
	override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
		val fullUrl = manga.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		val chaptersDeferred = async { getChapters(doc) }
		val desc = doc.selectFirstOrThrow(selectDesc).html()
		val stateDiv = doc.selectFirst(selectState)
		val state = stateDiv?.let {
			when (it.text().lowercase()) {
				in ongoing -> MangaState.ONGOING
				in finished -> MangaState.FINISHED
				else -> null
			}
		}
		val alt = doc.body().selectFirst(selectAlt)?.text()?.replace("Other name", "")
		val auth = doc.body().selectFirst(selectAut)?.text()
		manga.copy(
			tags = doc.body().select(selectTag).mapNotNullToSet { a ->
				MangaTag(
					key = a.attr("href").substringAfter("manga-list-genre-").substringBeforeLast(".html"),
					title = a.text().toTitleCase(),
					source = source,
				)
			},
			description = desc,
			altTitle = alt,
			author = auth,
			state = state,
			chapters = chaptersDeferred.await(),
		)
	}

	/**
	 * Parses the chapter list; chapters are listed newest-first on the page,
	 * so the list is reversed to get ascending numbering.
	 */
	override suspend fun getChapters(doc: Document): List<MangaChapter> {
		val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
		return doc.body().select(selectChapter).mapChapters(reversed = true) { i, a ->
			val href = a.attrAsRelativeUrl("href")
			val dateText = a.selectFirst(selectDate)?.text()?.substringAfter("- ")
			MangaChapter(
				id = generateUid(href),
				name = a.selectFirstOrThrow("div.chapter-name").text(),
				number = i + 1f,
				volume = 0,
				url = href,
				uploadDate = parseChapterDate(
					dateFormat,
					dateText,
				),
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}
}
|
|
||||||
Loading…
Reference in New Issue