Fix getListPage in OtakuSanctuaryParser, Bakai, BrMangas, LerMangaOnline, MangaOnline, YugenMangas, MangaAy, SadScans, YaoiFlix

Add filter support to SinmhParser and TrWebtoon
pull/401/head
devi 2 years ago
parent 26b758ae50
commit 6ad78f1cba

@ -41,7 +41,6 @@ internal abstract class MangaReaderParser(
private var tagCache: ArrayMap<String, MangaTag>? = null
private val mutex = Mutex()
protected open var lastSearchPage = 1
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
val url = buildString {

@ -25,7 +25,7 @@ internal abstract class OtakuSanctuaryParser(
SortOrder.NEWEST,
)
protected open val listeurl = "Manga/Newest"
protected open val listUrl = "Manga/Newest"
protected open val datePattern = "dd/MM/yyyy"
protected open val lang = ""
@ -40,60 +40,71 @@ internal abstract class OtakuSanctuaryParser(
"Done",
)
override suspend fun getListPage(
page: Int,
query: String?,
tags: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> {
val tag = tags.oneOrThrowIfMany()
override val isMultipleTagsSupported = false
val doc = if (!tags.isNullOrEmpty()) {
val url = buildString {
append("https://$domain/Genre/MangaGenrePartial?id=")
append(tag?.key.orEmpty())
append("&lang=$lang")
append("&offset=")
append(page * pageSize)
append("&pagesize=$pageSize")
}
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
val doc =
when (filter) {
webClient.httpGet(url).parseHtml()
} else if (!query.isNullOrEmpty()) {
is MangaListFilter.Search -> {
if (page > 1) {
return emptyList()
}
val url = buildString {
append("https://$domain/Home/Search?search=")
append(query.urlEncoded())
append("https://")
append(domain)
append("/Home/Search?search=")
append(filter.query.urlEncoded())
}
webClient.httpGet(url).parseHtml().requireElementById("collection-manga")
}
is MangaListFilter.Advanced -> {
if (filter.tags.isNotEmpty()) {
val url = buildString {
append("https://")
append(domain)
append("/Genre/MangaGenrePartial?id=")
filter.tags.oneOrThrowIfMany()?.let {
append(it.key)
}
append("&lang=")
append(lang)
append("&offset=")
append(page * pageSize)
append("&pagesize=")
append(pageSize)
}
webClient.httpGet(url).parseHtml()
} else {
val url = "https://$domain/$listeurl"
val payload = HashMap<String, String>()
payload["Lang"] = lang
payload["Page"] = page.toString()
payload["Type"] = "Include"
when (sortOrder) {
when (filter.sortOrder) {
SortOrder.NEWEST -> payload["Dir"] = "CreatedDate"
SortOrder.UPDATED -> payload["Dir"] = "NewPostedDate"
else -> payload["Dir"] = "NewPostedDate"
}
webClient.httpPost(url, payload).parseHtml()
webClient.httpPost("https://$domain/$listUrl", payload).parseHtml()
}
}
val mangas = if (!query.isNullOrEmpty()) {
doc.requireElementById("collection-manga").select("div.picture-card")
} else {
doc.select("div.picture-card")
null -> {
val payload = HashMap<String, String>()
payload["Lang"] = lang
payload["Page"] = page.toString()
payload["Type"] = "Include"
payload["Dir"] = "NewPostedDate"
webClient.httpPost("https://$domain/$listUrl", payload).parseHtml()
}
}
return mangas.map { div ->
return doc.select("div.picture-card").map { div ->
val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
Manga(
id = generateUid(href),

@ -1,6 +1,7 @@
package org.koitharu.kotatsu.parsers.site.pt
import okhttp3.Headers
import org.jsoup.nodes.Document
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
@ -16,70 +17,94 @@ internal class Bakai(context: MangaLoaderContext) : PagedMangaParser(context, Ma
override val availableSortOrders: Set<SortOrder> = EnumSet.of(SortOrder.UPDATED)
override val configKeyDomain = ConfigKey.Domain("bakai.org")
override val headers: Headers = Headers.Builder()
.add("User-Agent", UserAgents.CHROME_MOBILE)
.add("User-Agent", UserAgents.CHROME_DESKTOP)
.build()
override suspend fun getListPage(
page: Int,
query: String?,
tags: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> {
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
when (filter) {
is MangaListFilter.Search -> {
val url = buildString {
append("https://")
append(domain)
if (!query.isNullOrEmpty()) {
append("/search/?q=")
append(query.urlEncoded())
append("&quick=1&type=cms_records1&page=")
append("/search1/?q=")
append(filter.query.urlEncoded())
append("&quick=1&type=cms_records1&updated_after=any&sortby=newest&page=")
append(page.toString())
} else if (!tags.isNullOrEmpty()) {
append("/search/?tags=")
for (tag in tags) {
append(tag.key)
append(",")
}
append("&quick=1&type=cms_records1&page=")
return parseMangaListQueryOrTags(webClient.httpGet(url).parseHtml())
}
is MangaListFilter.Advanced -> {
if (filter.tags.isNotEmpty()) {
val url = buildString {
append("https://")
append(domain)
append("/search1/?tags=")
append(filter.tags.joinToString(separator = ",") { it.key })
append("&updated_after=any&sortby=newest&search_and_or=and&page=")
append(page.toString())
}
return parseMangaListQueryOrTags(webClient.httpGet(url).parseHtml())
} else {
append("/hentai/")
append("page/")
val url = buildString {
append("https://")
append(domain)
append("/hentai/page/")
append(page.toString())
}
return parseMangaList(webClient.httpGet(url).parseHtml())
}
val doc = webClient.httpGet(url).parseHtml()
if (!tags.isNullOrEmpty() or !query.isNullOrEmpty()) {
return doc.select("ol.ipsStream li.ipsStreamItem")
}
null -> {
val url = buildString {
append("https://")
append(domain)
append("/hentai/page/")
append(page.toString())
}
return parseMangaList(webClient.httpGet(url).parseHtml())
}
}
}
private fun parseMangaList(doc: Document): List<Manga> {
return doc.select("section.ipsType_normal li.ipsGrid_span4")
.map { div ->
val href = div.selectFirstOrThrow("div.ipsStreamItem_snippet a").attrAsRelativeUrl("href")
val href = div.selectFirstOrThrow("h2.ipsType_pageTitle a").attrAsRelativeUrl("href")
Manga(
id = generateUid(href),
title = div.selectFirstOrThrow("h2.ipsStreamItem_title").text(),
title = div.selectFirstOrThrow("h2.ipsType_pageTitle").text(),
altTitle = null,
url = href,
publicUrl = href.toAbsoluteUrl(domain),
rating = RATING_UNKNOWN,
isNsfw = true,
coverUrl = div.selectFirstOrThrow("span.ipsThumb img").attrAsAbsoluteUrl("src"),
coverUrl = div.selectFirst("img")?.src().orEmpty(),
tags = setOf(),
state = null,
author = null,
source = source,
)
}
} else {
return doc.select("section.ipsType_normal li.ipsGrid_span4")
.map { div ->
val href = div.selectFirstOrThrow("h2.ipsType_pageTitle a").attrAsRelativeUrl("href")
}
private fun parseMangaListQueryOrTags(doc: Document): List<Manga> {
return doc.select("ol.ipsStream li.ipsStreamItem")
.mapNotNull { div ->
val href =
div.selectFirst(".ipsStreamItem_snippet a")?.attrAsRelativeUrl("href") ?: return@mapNotNull null
Manga(
id = generateUid(href),
title = div.selectFirstOrThrow("h2.ipsType_pageTitle").text(),
title = div.selectFirstOrThrow("h2.ipsStreamItem_title").text(),
altTitle = null,
url = href,
publicUrl = href.toAbsoluteUrl(domain),
rating = RATING_UNKNOWN,
isNsfw = true,
coverUrl = div.selectFirstOrThrow("img").attrAsAbsoluteUrl("src"),
coverUrl = div.selectFirst(".ipsStreamItem_snippet img")?.src().orEmpty(),
tags = setOf(),
state = null,
author = null,
@ -87,7 +112,6 @@ internal class Bakai(context: MangaLoaderContext) : PagedMangaParser(context, Ma
)
}
}
}
override suspend fun getAvailableTags(): Set<MangaTag> {
val doc = webClient.httpGet("https://$domain").parseHtml()

@ -17,33 +17,37 @@ internal class BrMangas(context: MangaLoaderContext) : PagedMangaParser(context,
override val configKeyDomain = ConfigKey.Domain("www.brmangas.net")
override val isMultipleTagsSupported = false
override val headers: Headers = Headers.Builder()
.add("User-Agent", UserAgents.CHROME_DESKTOP)
.build()
override suspend fun getListPage(
page: Int,
query: String?,
tags: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> {
val tag = tags.oneOrThrowIfMany()
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
val url = buildString {
append("https://$domain/")
if (!tags.isNullOrEmpty()) {
append("category/")
append(tag?.key.orEmpty())
append("https://")
append(domain)
append('/')
when (filter) {
is MangaListFilter.Search -> {
if (page > 1) {
append("/page/$page/")
}
} else if (!query.isNullOrEmpty()) {
append("/?s=")
append(filter.query.urlEncoded())
}
is MangaListFilter.Advanced -> {
if (filter.tags.isNotEmpty()) {
filter.tags.oneOrThrowIfMany()?.let {
append("category/")
append(it.key)
if (page > 1) {
append("/page/$page/")
}
append("/?s=")
append(query.urlEncoded())
}
} else {
when (sortOrder) {
when (filter.sortOrder) {
SortOrder.POPULARITY -> append("/")
SortOrder.UPDATED -> append("manga/")
else -> append("manga/")
@ -52,16 +56,39 @@ internal class BrMangas(context: MangaLoaderContext) : PagedMangaParser(context,
append("page/$page/")
}
}
}
null -> {
append("manga/")
if (page > 1) {
append("page/$page/")
}
}
}
}
val doc = webClient.httpGet(url).parseHtml()
val item = if (sortOrder == SortOrder.POPULARITY) {
val item =
when (filter) {
is MangaListFilter.Search -> {
doc.select("div.listagem div.item")
}
is MangaListFilter.Advanced -> {
if (filter.sortOrder == SortOrder.POPULARITY && filter.tags.isEmpty()) {
doc.select("div.listagem")[1].select("div.item") // To remove the 6 mangas updated on the home page
} else {
doc.select("div.listagem div.item")
}
}
null -> doc.select("div.listagem div.item")
}
return item.map { div ->
val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
Manga(

@ -7,7 +7,6 @@ import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import java.lang.IllegalArgumentException
import java.text.SimpleDateFormat
import java.util.*
@ -18,30 +17,48 @@ class LerMangaOnline(context: MangaLoaderContext) : PagedMangaParser(context, Ma
override val configKeyDomain = ConfigKey.Domain("lermangaonline.com.br")
override suspend fun getListPage(
page: Int,
query: String?,
tags: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> {
if (!query.isNullOrEmpty()) {
throw IllegalArgumentException("Search is not supported by this source")
}
val tag = tags.oneOrThrowIfMany()
override val isMultipleTagsSupported = false
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
val url = buildString {
append("https://")
append(domain)
append('/')
when (filter) {
is MangaListFilter.Search -> {
if (page > 1) {
append("page/")
append(page.toString())
append("/")
if (!tags.isNullOrEmpty()) {
append(tag?.key.orEmpty())
append("/")
}
append("?s=")
append(filter.query.urlEncoded())
}
is MangaListFilter.Advanced -> {
filter.tags.oneOrThrowIfMany()?.let {
append(it.key)
append('/')
}
if (page > 1) {
append("page/")
append(page)
append("/")
append(page.toString())
append('/')
}
}
null -> {
if (page > 1) {
append("page/")
append(page.toString())
append('/')
}
}
}
}
return parseManga(webClient.httpGet(url).parseHtml())
}

@ -16,37 +16,40 @@ class MangaOnline(context: MangaLoaderContext) : PagedMangaParser(context, Manga
override val configKeyDomain = ConfigKey.Domain("mangaonline.biz")
override suspend fun getListPage(
page: Int,
query: String?,
tags: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> {
val tag = tags.oneOrThrowIfMany()
override val isMultipleTagsSupported = false
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
val url = buildString {
append("https://")
append(domain)
when {
!tags.isNullOrEmpty() -> {
append("/genero/")
append(tag?.key.orEmpty())
append("/")
}
when (filter) {
!query.isNullOrEmpty() -> {
is MangaListFilter.Search -> {
append("/search/")
append(query.urlEncoded())
append("/")
append(filter.query.urlEncoded())
append('/')
}
else -> {
is MangaListFilter.Advanced -> {
if (filter.tags.isNotEmpty()) {
filter.tags.oneOrThrowIfMany()?.let {
append("/genero/")
append(it.key)
append('/')
}
} else {
append("/manga/")
}
}
null -> append("/manga/")
}
if (page > 1) {
append("page/")
append(page)
append("/")
append(page.toString())
append('/')
}
}
val doc = webClient.httpGet(url).parseHtml()

@ -8,7 +8,6 @@ import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import org.koitharu.kotatsu.parsers.util.json.mapJSON
import org.koitharu.kotatsu.parsers.util.json.mapJSONIndexed
import java.text.DateFormat
import java.text.SimpleDateFormat
import java.util.*
@ -19,44 +18,71 @@ class YugenMangas(context: MangaLoaderContext) : PagedMangaParser(context, Manga
override val availableSortOrders: Set<SortOrder> = EnumSet.of(SortOrder.ALPHABETICAL, SortOrder.UPDATED)
override val configKeyDomain = ConfigKey.Domain("yugenmangas.net.br")
override suspend fun getListPage(
page: Int,
query: String?,
tags: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> {
val json =
if (!query.isNullOrEmpty()) {
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
if (page > 1) {
return emptyList()
}
val json =
when (filter) {
is MangaListFilter.Search -> {
val url = buildString {
append("https://api.")
append(domain)
append("/api/series/list/?query=")
append(query.urlEncoded())
append(filter.query.urlEncoded())
}
webClient.httpGet(url).parseJsonArray()
} else {
if (page > 1) {
return emptyList()
}
is MangaListFilter.Advanced -> {
if (filter.sortOrder == SortOrder.UPDATED) {
val url = buildString {
append("https://api.")
append(domain)
append("/api/latest_updates/")
}
webClient.httpGet(url).parseJsonArray()
} else {
val url = buildString {
append("https://api.")
append(domain)
append("/api/all_series/?page=1")
append("/api/all_series/")
}
webClient.httpGet(url).parseJson().getJSONArray("series")
}
}
null -> {
val url = buildString {
append("https://api.")
append(domain)
append("/api/latest_updates/")
}
webClient.httpGet(url).parseJsonArray()
}
}
return json.mapJSON { j ->
val slug = j.getString("slug")
val cover = if (!j.getString("cover").startsWith("https://")) {
// Some covers don't have the "/" so we ensure that the URL will be spelled correctly.
"https://$domain/media/" + j.getString("cover").removePrefix("/")
} else {
j.getString("cover")
}
Manga(
id = generateUid(slug),
url = slug,
publicUrl = slug,
title = j.getString("name"),
coverUrl = j.getString("cover"),
coverUrl = cover,
altTitle = null,
rating = RATING_UNKNOWN,
tags = emptySet(),
@ -90,7 +116,7 @@ class YugenMangas(context: MangaLoaderContext) : PagedMangaParser(context, Manga
else -> null
}
},
chapters = chapterManga.mapJSONIndexed { i, j ->
chapters = chapterManga.mapJSON { j ->
val url = "https://api.$domain/api/serie/${manga.url}/chapter/${j.getString("slug")}/images/imgs/"
MangaChapter(
id = generateUid(url),

@ -23,6 +23,10 @@ internal abstract class SinmhParser(
SortOrder.POPULARITY,
)
override val availableStates: Set<MangaState> = EnumSet.of(MangaState.ONGOING, MangaState.FINISHED)
override val isMultipleTagsSupported = false
protected open val searchUrl = "search/"
protected open val listUrl = "list/"
@ -41,40 +45,55 @@ internal abstract class SinmhParser(
"已完结",
)
override suspend fun getListPage(
page: Int,
query: String?,
tags: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> {
val tag = tags.oneOrThrowIfMany()
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
val url = buildString {
append("https://")
append(domain)
when {
!query.isNullOrEmpty() -> {
append("/$searchUrl?keywords=")
append(query.urlEncoded())
append('/')
when (filter) {
is MangaListFilter.Search -> {
append(searchUrl)
append("?keywords=")
append(filter.query.urlEncoded())
append("&page=")
append(page)
}
!tags.isNullOrEmpty() -> {
append("/$listUrl")
append(tag?.key.orEmpty())
append("/$page/")
is MangaListFilter.Advanced -> {
append(listUrl)
filter.tags.oneOrThrowIfMany()?.let {
append(it.key)
}
else -> {
filter.states.oneOrThrowIfMany()?.let {
append(
when (it) {
MangaState.ONGOING -> "-lianzai"
MangaState.FINISHED -> "-wanjie"
else -> ""
},
)
}
append("/$listUrl")
when (sortOrder) {
if (filter.tags.isNotEmpty() && filter.states.isNotEmpty()) {
append('/')
}
when (filter.sortOrder) {
SortOrder.POPULARITY -> append("click/")
SortOrder.UPDATED -> append("update/")
else -> append("")
else -> append("/")
}
append("?page=")
append(page)
append(page.toString())
append('/')
}
null -> {
append(listUrl)
append("update/")
append(page.toString())
append('/')
}
}

@ -3,6 +3,7 @@ package org.koitharu.kotatsu.parsers.site.tr
import androidx.collection.ArrayMap
import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock
import org.jsoup.nodes.Document
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
@ -19,42 +20,35 @@ class MangaAy(context: MangaLoaderContext) : PagedMangaParser(context, MangaSour
override val configKeyDomain = ConfigKey.Domain("manga-ay.com")
override suspend fun getListPage(
page: Int,
query: String?,
tags: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> {
val tag = tags.oneOrThrowIfMany()
if (!query.isNullOrEmpty() || !tags.isNullOrEmpty()) {
override val isMultipleTagsSupported = false
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
when (filter) {
is MangaListFilter.Search -> {
if (page > 1) {
return emptyList()
}
val url = "https://$domain/arama"
val doc = webClient.httpPost(
url,
mapOf(
"title" to query?.urlEncoded().orEmpty(),
"genres" to tag?.key.orEmpty(),
),
).parseHtml()
return doc.select(".table tr").map { tr ->
val a = tr.selectFirstOrThrow("a")
val href = a.attrAsRelativeUrl("href")
Manga(
id = generateUid(href),
url = href,
publicUrl = a.attrAsAbsoluteUrl("href"),
title = a.text(),
coverUrl = "",
altTitle = null,
rating = RATING_UNKNOWN,
tags = emptySet(),
description = null,
state = null,
author = null,
isNsfw = isNsfwSource,
source = source,
return parseMangaListQueryOrTags(
webClient.httpPost(
"https://$domain/arama",
mapOf("title" to filter.query.urlEncoded(), "genres" to ""),
).parseHtml(),
)
}
is MangaListFilter.Advanced -> {
if (filter.tags.isNotEmpty()) {
filter.tags.oneOrThrowIfMany()?.let {
if (page > 1) {
return emptyList()
}
return parseMangaListQueryOrTags(
webClient.httpPost(
"https://$domain/arama",
mapOf("title" to "", "genres" to it.key),
).parseHtml(),
)
}
} else {
@ -67,8 +61,30 @@ class MangaAy(context: MangaLoaderContext) : PagedMangaParser(context, MangaSour
append(page)
}
}
val doc = webClient.httpGet(url).parseHtml().requireElementById("ecommerce-products")
return doc.select(".card").map { div ->
return parseMangaList(webClient.httpGet(url).parseHtml())
}
}
null -> {
val url = buildString {
append("https://")
append(domain)
append("/seriler")
if (page > 1) {
append("/")
append(page)
}
}
return parseMangaList(webClient.httpGet(url).parseHtml())
}
}
return emptyList()
}
private fun parseMangaList(doc: Document): List<Manga> {
return doc.requireElementById("ecommerce-products").select(".card").map { div ->
val a = div.selectFirstOrThrow("a")
val href = a.attrAsRelativeUrl("href")
Manga(
@ -88,6 +104,27 @@ class MangaAy(context: MangaLoaderContext) : PagedMangaParser(context, MangaSour
)
}
}
private fun parseMangaListQueryOrTags(doc: Document): List<Manga> {
return doc.select(".table tr").map { tr ->
val a = tr.selectFirstOrThrow("a")
val href = a.attrAsRelativeUrl("href")
Manga(
id = generateUid(href),
url = href,
publicUrl = a.attrAsAbsoluteUrl("href"),
title = a.text(),
coverUrl = "",
altTitle = null,
rating = RATING_UNKNOWN,
tags = emptySet(),
description = null,
state = null,
author = null,
isNsfw = isNsfwSource,
source = source,
)
}
}
private var tagCache: ArrayMap<String, MangaTag>? = null

@ -15,20 +15,29 @@ internal class SadScans(context: MangaLoaderContext) : MangaParser(context, Mang
override val availableSortOrders: Set<SortOrder> = EnumSet.of(SortOrder.ALPHABETICAL)
override val configKeyDomain = ConfigKey.Domain("sadscans.com")
override suspend fun getList(offset: Int, query: String?, tags: Set<MangaTag>?, sortOrder: SortOrder): List<Manga> {
override suspend fun getList(offset: Int, filter: MangaListFilter?): List<Manga> {
if (offset > 0) {
return emptyList()
}
val url = buildString {
append("https://$domain/series")
if (!query.isNullOrEmpty()) {
append("https://")
append(domain)
append("/series")
when (filter) {
is MangaListFilter.Search -> {
append("?search=")
append(query.urlEncoded())
append(filter.query.urlEncoded())
}
is MangaListFilter.Advanced -> {}
null -> {}
}
}
val doc = webClient.httpGet(url).parseHtml()
return doc.select(".series-list").map { div ->
val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
val href = "/" + div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
Manga(
id = generateUid(href),
title = div.selectFirstOrThrow("h2").text(),
@ -64,7 +73,7 @@ internal class SadScans(context: MangaLoaderContext) : MangaParser(context, Mang
chapters = doc.select(".chap-section .chap")
.mapChapters(reversed = true) { i, div ->
val a = div.selectFirstOrThrow("a")
val url = a.attrAsRelativeUrl("href").toAbsoluteUrl(domain)
val url = "/" + a.attrAsRelativeUrl("href").toAbsoluteUrl(domain)
MangaChapter(
id = generateUid(url),
name = a.text(),

@ -1,11 +1,13 @@
package org.koitharu.kotatsu.parsers.site.tr
import org.jsoup.nodes.Document
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import java.lang.IllegalArgumentException
import java.text.DateFormat
import java.text.SimpleDateFormat
import java.util.*
@ -16,66 +18,102 @@ class TrWebtoon(context: MangaLoaderContext) :
override val configKeyDomain: ConfigKey.Domain = ConfigKey.Domain("trwebtoon.com")
override val availableSortOrders: Set<SortOrder>
get() = EnumSet.of(SortOrder.POPULARITY, SortOrder.ALPHABETICAL, SortOrder.UPDATED)
override suspend fun getListPage(
page: Int,
query: String?,
tags: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> {
val tag = tags.oneOrThrowIfMany()
val url = if (sortOrder == SortOrder.UPDATED && query.isNullOrEmpty() && tags.isNullOrEmpty()) {
buildString {
override val availableSortOrders: Set<SortOrder> =
EnumSet.of(SortOrder.POPULARITY, SortOrder.ALPHABETICAL, SortOrder.UPDATED)
override val availableStates: Set<MangaState> = EnumSet.of(MangaState.ONGOING, MangaState.FINISHED)
override val isMultipleTagsSupported = false
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
when (filter) {
is MangaListFilter.Search -> {
val url = buildString {
append("https://")
append(domain)
append("/son-eklenenler")
append("?page=")
append(page)
append("/webtoon-listesi?page=")
append(page.toString())
append("&q=")
append(filter.query.urlEncoded())
append("&sort=views&short_type=DESC")
}
} else {
buildString {
return parseMangaList(webClient.httpGet(url).parseHtml())
}
is MangaListFilter.Advanced -> {
if (filter.sortOrder == SortOrder.UPDATED) {
if (filter.tags.isNotEmpty()) {
throw IllegalArgumentException("Sort order updated + Tags or States is not supported by this source")
}
val url = buildString {
append("https://")
append(domain)
append("/webtoon-listesi")
append("?page=")
append(page)
when {
!query.isNullOrEmpty() -> {
append("&q=")
append(query.urlEncoded())
append("/son-eklenenler?page=")
append(page.toString())
}
!tags.isNullOrEmpty() -> {
return parseMangaListUpdated(webClient.httpGet(url).parseHtml())
} else {
val url = buildString {
append("https://")
append(domain)
append("/webtoon-listesi?page=")
append(page.toString())
filter.tags.oneOrThrowIfMany()?.let {
append("&genre=")
append(tag?.key.orEmpty())
}
append(it.key)
}
filter.states.oneOrThrowIfMany()?.let {
append("&status=")
append(
when (it) {
MangaState.ONGOING -> "continues"
MangaState.FINISHED -> "complated"
else -> ""
},
)
}
append("&sort=")
when (sortOrder) {
when (filter.sortOrder) {
SortOrder.POPULARITY -> append("views&short_type=DESC")
SortOrder.ALPHABETICAL -> append("name&short_type=ASC")
else -> append("views&short_type=DESC")
}
}
return parseMangaList(webClient.httpGet(url).parseHtml())
}
}
val doc = webClient.httpGet(url).parseHtml()
val mangas = if (sortOrder == SortOrder.UPDATED && query.isNullOrEmpty() && tags.isNullOrEmpty()) {
doc.select(".page-content div.bslist_item").map { li ->
null -> {
val url = buildString {
append("https://")
append(domain)
append("/son-eklenenler?page=")
append(page.toString())
}
return parseMangaListUpdated(webClient.httpGet(url).parseHtml())
}
}
}
private fun parseMangaList(doc: Document): List<Manga> {
return doc.select(".row .col-xl-4 .card-body").map { li ->
val href = li.selectFirstOrThrow("a").attrAsRelativeUrl("href")
Manga(
id = generateUid(href),
url = href,
publicUrl = href.toAbsoluteUrl(domain),
coverUrl = li.selectFirst(".figure img")?.src().orEmpty(),
title = li.selectFirst(".title")?.text().orEmpty(),
coverUrl = li.selectFirst("img")?.src().orEmpty(),
title = li.selectFirst(".table-responsive a")?.text().orEmpty(),
altTitle = null,
rating = RATING_UNKNOWN,
rating = li.selectFirst(".row .col-xl-4 .mt-2 .my-1 .text-muted")?.text()?.substringBefore("/")
?.toFloatOrNull()?.div(5f) ?: RATING_UNKNOWN,
tags = emptySet(),
author = null,
state = when (doc.selectFirst("d-inline .badge")?.text()) {
state = when (doc.selectLast(".row .col-xl-4 .mt-2 .rounded-pill")?.text()) {
"Devam Ediyor", "Güncel" -> MangaState.ONGOING
"Tamamlandı" -> MangaState.FINISHED
else -> null
@ -84,21 +122,22 @@ class TrWebtoon(context: MangaLoaderContext) :
isNsfw = isNsfwSource,
)
}
} else {
doc.select(".row .col-xl-4 .card-body").map { li ->
}
private fun parseMangaListUpdated(doc: Document): List<Manga> {
return doc.select(".page-content div.bslist_item").map { li ->
val href = li.selectFirstOrThrow("a").attrAsRelativeUrl("href")
Manga(
id = generateUid(href),
url = href,
publicUrl = href.toAbsoluteUrl(domain),
coverUrl = li.selectFirst("img")?.src().orEmpty(),
title = li.selectFirst(".table-responsive a")?.text().orEmpty(),
coverUrl = li.selectFirst(".figure img")?.src().orEmpty(),
title = li.selectFirst(".title")?.text().orEmpty(),
altTitle = null,
rating = li.selectFirst(".row .col-xl-4 .mt-2 .my-1 .text-muted")?.text()?.substringBefore("/")
?.toFloatOrNull()?.div(5f) ?: RATING_UNKNOWN,
rating = RATING_UNKNOWN,
tags = emptySet(),
author = null,
state = when (doc.selectLast(".row .col-xl-4 .mt-2 .rounded-pill")?.text()) {
state = when (doc.selectFirst("d-inline .badge")?.text()) {
"Devam Ediyor", "Güncel" -> MangaState.ONGOING
"Tamamlandı" -> MangaState.FINISHED
else -> null
@ -109,9 +148,6 @@ class TrWebtoon(context: MangaLoaderContext) :
}
}
return mangas
}
override suspend fun getAvailableTags(): Set<MangaTag> {
val tags =
webClient.httpGet("https://$domain/webtoon-listesi").parseHtml().requireElementById("collapseExample")

@ -16,43 +16,50 @@ class YaoiFlix(context: MangaLoaderContext) : PagedMangaParser(context, MangaSou
override val configKeyDomain = ConfigKey.Domain("www.yaoiflix.pro")
override suspend fun getListPage(
page: Int,
query: String?,
tags: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> {
val tag = tags.oneOrThrowIfMany()
override val isMultipleTagsSupported = false
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
val url = buildString {
append("https://")
append(domain)
when {
!query.isNullOrEmpty() -> {
when (filter) {
is MangaListFilter.Search -> {
if (page > 1) {
append("/page/")
append(page)
append(page.toString())
}
append("/?s=")
append(query.urlEncoded())
append(filter.query.urlEncoded())
}
!tags.isNullOrEmpty() -> {
is MangaListFilter.Advanced -> {
if (filter.tags.isNotEmpty()) {
filter.tags.oneOrThrowIfMany()?.let {
append("/dizi-kategori/")
append(tag?.key.orEmpty())
append(it.key)
append("/")
if (page > 1) {
append("page/")
append(page)
append("/")
append(page.toString())
append('/')
}
}
} else {
append("/tum-seriler/")
if (page > 1) {
append("page/")
append(page.toString())
append('/')
}
}
}
else -> {
null -> {
append("/tum-seriler/")
if (page > 1) {
append("page/")
append(page)
append("/")
append(page.toString())
append('/')
}
}
}

Loading…
Cancel
Save