commit
4d22e7d1e8
@ -0,0 +1,184 @@
|
||||
package org.koitharu.kotatsu.parsers.site.ar
|
||||
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.awaitAll
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import org.jsoup.nodes.Element
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
/**
 * Parser for teamxnovel.com (Arabic).
 *
 * Listing supports genre filter, text search and a plain browse mode; the chapter
 * list on a details page is paginated and extra pages are fetched concurrently.
 */
@MangaSourceParser("TEAMXNOVEL", "TeamXNovel", "ar")
internal class TeamXNovel(context: MangaLoaderContext) : PagedMangaParser(context, MangaSource.TEAMXNOVEL, 10) {

	override val sortOrders: Set<SortOrder> = EnumSet.of(SortOrder.UPDATED, SortOrder.POPULARITY)

	override val configKeyDomain = ConfigKey.Domain("teamxnovel.com")

	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		val tag = tags.oneOrThrowIfMany()

		// Three mutually exclusive listing modes: genre filter, text search, plain browse.
		val url = buildString {
			append("https://$domain")
			if (!tags.isNullOrEmpty()) {
				append("/series?genre=")
				append(tag?.key.orEmpty())
				if (page > 1) {
					append("&page=")
					append(page)
				}
			} else if (!query.isNullOrEmpty()) {
				append("/series?search=")
				append(query.urlEncoded())
				if (page > 1) {
					append("&page=")
					append(page)
				}
			} else {
				when (sortOrder) {
					SortOrder.POPULARITY -> append("/series")
					else -> append("/") // UPDATED (and anything else) uses the home page
				}
				if (page > 1) {
					append("?page=")
					append(page)
				}
			}
		}

		val doc = webClient.httpGet(url).parseHtml()

		// The series listing and the home page use different card markup; try both.
		return doc.select("div.listupd .bs .bsx").ifEmpty {
			doc.select("div.post-body .box")
		}.map { div ->
			val href = div.selectFirstOrThrow("a").attrAsAbsoluteUrl("href")
			Manga(
				id = generateUid(href),
				title = div.select(".tt, h3").text(),
				altTitle = null,
				url = href,
				publicUrl = href.toAbsoluteUrl(domain),
				rating = RATING_UNKNOWN,
				isNsfw = false,
				coverUrl = div.selectFirstOrThrow("img").src().orEmpty(),
				tags = emptySet(),
				// Arabic status labels as used by the site.
				state = when (div.selectFirst(".status")?.text()) {
					"مستمرة" -> MangaState.ONGOING
					"متوقف", "مكتمل" -> MangaState.FINISHED
					else -> null
				},
				author = null,
				source = source,
			)
		}
	}

	override suspend fun getTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain/series").parseHtml()
		return doc.requireElementById("select_genre").select("option").mapNotNullToSet {
			MangaTag(
				key = it.attr("value"),
				title = it.text(),
				source = source,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val mangaUrl = manga.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(mangaUrl).parseHtml()

		// The chapter list is paginated: take the largest page index referenced by the
		// pagination links, tolerating non-numeric hrefs (defaults to a single page).
		val maxPageChapter = doc.select(".pagination .page-item a")
			.maxOfOrNull { it.attr("href").substringAfterLast('=').toIntOrNull() ?: 1 } ?: 1

		return manga.copy(
			altTitle = null,
			state = when (doc.selectFirstOrThrow(".full-list-info:contains(الحالة:) a").text()) {
				"مستمرة" -> MangaState.ONGOING
				"متوقف", "مكتمل" -> MangaState.FINISHED
				else -> null
			},
			tags = doc.select(".review-author-info a").mapNotNullToSet { a ->
				MangaTag(
					key = a.attr("href").substringAfterLast("="),
					title = a.text(),
					source = source,
				)
			},
			author = null,
			description = doc.selectFirstOrThrow(".review-content").text(),
			chapters = run {
				if (maxPageChapter == 1) {
					parseChapters(doc)
				} else {
					// Fetch the remaining chapter pages concurrently.
					coroutineScope {
						val result = ArrayList(parseChapters(doc))
						result.ensureCapacity(result.size * maxPageChapter)
						(2..maxPageChapter).map { i ->
							async {
								loadChapters(mangaUrl, i)
							}
						}.awaitAll()
							.flattenTo(result)
						result
					}
				}
			}.reversed(), // the site lists newest first; the app expects oldest first
		)
	}

	// Fetches and parses one extra page of a paginated chapter list.
	private suspend fun loadChapters(baseUrl: String, page: Int): List<MangaChapter> {
		return parseChapters(webClient.httpGet("$baseUrl?page=$page").parseHtml().body())
	}

	private val dateFormat = SimpleDateFormat("yyyy-MM-dd HH:mm:ss", sourceLocale)

	// Parses the chapter list out of a details page (or an extra chapter page).
	private fun parseChapters(root: Element): List<MangaChapter> {
		return root.requireElementById("chapter-contact").select(".eplister ul li")
			.map { li ->
				val url = li.selectFirstOrThrow("a").attrAsRelativeUrl("href")
				MangaChapter(
					id = generateUid(url),
					name = li.selectFirstOrThrow(".epl-title").text(),
					// The chapter number is the last path segment of its url, when numeric.
					number = url.substringAfterLast('/').toIntOrNull() ?: 0,
					url = url,
					scanlator = null,
					uploadDate = dateFormat.tryParse(li.selectFirstOrThrow(".epl-date").text()),
					branch = null,
					source = source,
				)
			}
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		return doc.select(".image_list img").map { img ->
			val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}
}
|
||||
@ -0,0 +1,147 @@
|
||||
package org.koitharu.kotatsu.parsers.site.en
|
||||
|
||||
import okhttp3.Headers
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.network.UserAgents
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
/**
 * Parser for www.mangageko.com (English).
 *
 * Search and browse use different endpoints; search is not paginated, so any
 * page beyond the first returns an empty list.
 */
@MangaSourceParser("MANGAGEKO", "MangaGeko", "en")
internal class MangaGeko(context: MangaLoaderContext) : PagedMangaParser(context, MangaSource.MANGAGEKO, 30) {

	override val sortOrders: Set<SortOrder> = EnumSet.of(SortOrder.POPULARITY, SortOrder.UPDATED, SortOrder.NEWEST)

	override val configKeyDomain = ConfigKey.Domain("www.mangageko.com")

	// A desktop UA is sent with every request.
	override val headers: Headers = Headers.Builder()
		.add("User-Agent", UserAgents.CHROME_DESKTOP)
		.build()

	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		val tag = tags.oneOrThrowIfMany()

		val url = if (!query.isNullOrEmpty()) {
			// The search endpoint has no pagination: only page 1 has results.
			if (page > 1) {
				return emptyList()
			}
			buildString {
				append("https://$domain/search/?search=")
				append(query.urlEncoded())
			}
		} else {
			buildString {
				append("https://$domain/browse-comics/?results=")
				append(page)
				append("&filter=")
				// Filter values expected by the site ("views", "Updated", "New").
				when (sortOrder) {
					SortOrder.POPULARITY -> append("views")
					SortOrder.UPDATED -> append("Updated")
					SortOrder.NEWEST -> append("New")
					else -> append("Updated")
				}
				if (!tags.isNullOrEmpty()) {
					append("&genre=")
					append(tag?.key.orEmpty())
				}
			}
		}

		val doc = webClient.httpGet(url).parseHtml()

		return doc.select("li.novel-item").map { div ->
			val href = div.selectFirstOrThrow("a").attrAsAbsoluteUrl("href")
			Manga(
				id = generateUid(href),
				title = div.selectFirstOrThrow("h4").text(),
				altTitle = null,
				url = href,
				publicUrl = href.toAbsoluteUrl(domain),
				rating = RATING_UNKNOWN,
				isNsfw = false,
				coverUrl = div.selectFirstOrThrow("img").src().orEmpty(),
				tags = emptySet(),
				state = null,
				// The h6 line looks like "Author(S): <name>"; strip the label.
				author = div.selectFirstOrThrow("h6").text().removePrefix("Author(S): "),
				source = source,
			)
		}
	}


	override suspend fun getTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain/browse-comics/").parseHtml()
		// Genres are the checkboxes of the browse-page filter form.
		return doc.select("label.checkbox-inline").mapNotNullToSet { label ->
			MangaTag(
				key = label.selectFirstOrThrow("input").attr("value"),
				title = label.text(),
				source = source,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
		val dateFormat = SimpleDateFormat("MMM dd, yyyy", sourceLocale)
		return manga.copy(
			altTitle = doc.selectFirstOrThrow(".alternative-title").text(),
			state = when (doc.selectFirstOrThrow(".header-stats span:contains(Status) strong").text()) {
				"Ongoing" -> MangaState.ONGOING
				"Completed" -> MangaState.FINISHED
				else -> null
			},
			tags = doc.select(".categories ul li a").mapNotNullToSet { a ->
				MangaTag(
					key = a.attr("href").substringAfterLast("="),
					title = a.text(),
					source = source,
				)
			},
			author = doc.selectFirstOrThrow(".author").text(),
			description = doc.selectFirstOrThrow(".description").html(),
			chapters = doc.requireElementById("chapters").select("ul.chapter-list li")
				.mapChapters(reversed = true) { i, li ->
					val a = li.selectFirstOrThrow("a")
					val url = a.attrAsRelativeUrl("href")
					val name = li.selectFirstOrThrow(".chapter-title").text()
					// NOTE(review): substringBeforeLast(',') drops everything after the
					// last comma of the "datetime" attribute, and "Sept" is normalized
					// to "Sep" for SimpleDateFormat — confirm the site's attribute
					// still matches the "MMM dd, yyyy" pattern after this trimming.
					val dateText = li.select(".chapter-update").attr("datetime").substringBeforeLast(',')
						.replace(".", "").replace("Sept", "Sep")
					MangaChapter(
						id = generateUid(url),
						name = name,
						number = i + 1,
						url = url,
						scanlator = null,
						uploadDate = dateFormat.tryParse(dateText),
						branch = null,
						source = source,
					)
				},
		)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		return doc.requireElementById("chapter-reader").select("img").map { img ->
			val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}

}
|
||||
@ -0,0 +1,168 @@
|
||||
package org.koitharu.kotatsu.parsers.site.fr
|
||||
|
||||
import okhttp3.Headers
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.network.UserAgents
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSON
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
/**
 * Parser for legacy-scans.com (French).
 *
 * Listing goes through a JSON API on the api. subdomain; details and pages are
 * scraped from the HTML site.
 */
@MangaSourceParser("LEGACY_SCANS", "Legacy Scans", "fr")
internal class LegacyScansParser(context: MangaLoaderContext) :
	PagedMangaParser(context, MangaSource.LEGACY_SCANS, 18) {

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.POPULARITY,
	)

	override val configKeyDomain = ConfigKey.Domain("legacy-scans.com")

	override val headers: Headers = Headers.Builder()
		.add("User-Agent", UserAgents.CHROME_MOBILE)
		.build()

	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		// The browse API paginates with inclusive [start, end] item indexes.
		val end = page * pageSize
		val start = end - (pageSize - 1)
		val url = if (!query.isNullOrEmpty()) {
			// The search endpoint is not paginated: only page 1 has results.
			if (page > 1) {
				return emptyList()
			}
			buildString {
				append("https://api.$domain/misc/home/search?title=")
				append(query.urlEncoded())
			}
		} else {
			buildString {
				append("https://api.$domain/misc/comic/search/query?status=&order=&genreNames=")
				if (!tags.isNullOrEmpty()) {
					// Genres are passed as a comma-separated (trailing comma is accepted) list.
					for (tag in tags) {
						append(tag.key)
						append(',')
					}
				}
				append("&type=&start=")
				append(start)
				append("&end=")
				append(end)
			}
		}
		val json = webClient.httpGet(url).parseJson()
		// Both endpoints return the same item shape, only under a different key,
		// and only the browse endpoint exposes covers.
		val isSearch = !query.isNullOrEmpty()
		return json.getJSONArray(if (isSearch) "results" else "comics").mapJSON { j ->
			val slug = j.getString("slug")
			val urlManga = "https://$domain/comics/$slug"
			Manga(
				id = generateUid(urlManga),
				title = j.getString("title"),
				altTitle = null,
				url = urlManga,
				publicUrl = urlManga,
				rating = RATING_UNKNOWN,
				isNsfw = false,
				coverUrl = if (isSearch) "" else "https://api.$domain/" + j.getString("cover"),
				tags = setOf(),
				state = null,
				author = null,
				source = source,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val root = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
		val dateFormat = SimpleDateFormat("dd/MM/yyyy", Locale.FRENCH)

		return manga.copy(
			altTitle = null,
			tags = root.select("div.serieGenre span").mapNotNullToSet { span ->
				// The site exposes no genre ids here, so the label doubles as the key.
				MangaTag(
					key = span.text(),
					title = span.text(),
					source = source,
				)
			},
			coverUrl = root.selectFirstOrThrow("div.serieImg img").attr("src"),
			author = root.select("div.serieAdd p:contains(Auteur:) strong").text(),
			description = root.selectFirst("div.serieDescription div")?.html(),
			chapters = root.select("div.chapterList a")
				.mapChapters(reversed = true) { i, a ->
					val href = a.attrAsRelativeUrl("href")
					val name = a.selectFirstOrThrow("span").text()
					// The last <span> of the card holds the dd/MM/yyyy date.
					val dateText = a.selectLast("span")?.text() ?: "0"
					MangaChapter(
						id = generateUid(href),
						name = name,
						number = i + 1,
						url = href,
						scanlator = null,
						uploadDate = dateFormat.tryParse(dateText),
						branch = null,
						source = source,
					)
				},
		)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		return doc.select("div.readerComics img").map { img ->
			val url = img.src() ?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}

	override suspend fun getTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain/comics").parseHtml()
		// NOTE(review): genres are scraped out of the serialized Nuxt payload by raw
		// string slicing between the "genres" and "comics" markers — this is fragile
		// and will break if the payload layout changes.
		val script = doc.requireElementById("__NUXT_DATA__").data()
			.substringAfterLast("\"genres\"").substringBeforeLast("\"comics\"")
			.split("\",\"").drop(1)

		return script.mapNotNullToSet { tag ->
			MangaTag(
				key = tag.substringBeforeLast("\",{"),
				title = tag.substringBeforeLast("\",{"),
				source = source,
			)
		}
	}
}
|
||||
@ -0,0 +1,163 @@
|
||||
package org.koitharu.kotatsu.parsers.site.fr
|
||||
|
||||
import okhttp3.Headers
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.network.UserAgents
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSON
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
/**
 * Parser for perf-scan.fr (French).
 *
 * Listing goes through a JSON query API; details and pages are scraped from the
 * HTML site. Chapter dates may be absolute ("MM/dd/yyyy") or relative ("... ago").
 */
@MangaSourceParser("PERF_SCAN", "Perf Scan", "fr")
internal class PerfScan(context: MangaLoaderContext) : PagedMangaParser(context, MangaSource.PERF_SCAN, 12) {

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.ALPHABETICAL,
		SortOrder.UPDATED,
		SortOrder.NEWEST,
		SortOrder.POPULARITY,
	)

	override val configKeyDomain = ConfigKey.Domain("perf-scan.fr")

	override val headers: Headers = Headers.Builder()
		.add("User-Agent", UserAgents.CHROME_DESKTOP)
		.build()

	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {

		val url = buildString {
			append("https://api.$domain/query?query_string=")

			if (!query.isNullOrEmpty()) {
				append(query.urlEncoded())
			}

			append("&series_status=All&order=desc&orderBy=")
			// orderBy values expected by the API.
			when (sortOrder) {
				SortOrder.POPULARITY -> append("total_views")
				SortOrder.UPDATED -> append("latest")
				SortOrder.NEWEST -> append("created_at")
				SortOrder.ALPHABETICAL -> append("title")
				else -> append("latest")
			}

			append("&series_type=Comic&page=")
			append(page)
			// Tag filtering is not supported; an empty JSON array is always sent.
			append("&perPage=12&tags_ids=")
			append("[]".urlEncoded())
		}
		val json = webClient.httpGet(url).parseJson()
		return json.getJSONArray("data").mapJSON { j ->
			val slug = j.getString("series_slug")
			val urlManga = "https://$domain/series/$slug"
			Manga(
				id = generateUid(urlManga),
				title = j.getString("title"),
				altTitle = null,
				url = urlManga,
				publicUrl = urlManga,
				rating = RATING_UNKNOWN,
				isNsfw = false,
				coverUrl = j.getString("thumbnail"),
				tags = setOf(),
				state = when (j.getString("status")) {
					"Ongoing" -> MangaState.ONGOING
					"Completed" -> MangaState.FINISHED
					else -> null
				},
				author = null,
				source = source,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val root = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
		// Fix: the previous pattern "MM/DD/yyyy" used uppercase 'D' (day-of-year in
		// SimpleDateFormat), which mis-parses dates; day-of-month is lowercase 'dd'.
		val dateFormat = SimpleDateFormat("MM/dd/yyyy", Locale.ENGLISH)

		return manga.copy(
			altTitle = root.selectFirstOrThrow("p.text-center.text-gray-400").text(),
			tags = emptySet(),
			author = root.select("div.flex.flex-col.gap-y-2 p:contains(Autor:) strong").text(),
			description = root.selectFirst(".datas_synopsis")?.html(),
			chapters = root.select("ul.grid a")
				.mapChapters(reversed = true) { i, a ->

					val href = a.attrAsRelativeUrl("href")
					val name = a.selectFirstOrThrow("span").text()
					// The last <span> of the card holds the (absolute or relative) date.
					val dateText = a.selectLast("span")?.text() ?: "0"
					MangaChapter(
						id = generateUid(href),
						name = name,
						number = i + 1,
						url = href,
						scanlator = null,
						uploadDate = parseChapterDate(
							dateFormat,
							dateText,
						),
						branch = null,
						source = source,
					)
				},
		)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		return doc.select("p.flex img").map { img ->
			val url = img.src() ?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}

	override suspend fun getTags(): Set<MangaTag> = emptySet()

	/**
	 * Parses a chapter date that may be absolute (per [dateFormat]) or relative
	 * ("2 days ago"). Returns 0 when [date] is null or unparseable.
	 * Tightened from `protected` to `private`: this class is final, so no
	 * subclass could ever have accessed it.
	 */
	private fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
		val d = date?.lowercase() ?: return 0
		return when {
			d.endsWith(" ago") -> parseRelativeDate(date)

			else -> dateFormat.tryParse(date)
		}
	}

	// Converts "N <unit>(s) ago" into a timestamp by subtracting from "now".
	private fun parseRelativeDate(date: String): Long {
		val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
		val cal = Calendar.getInstance()

		return when {
			WordSet("day", "days").anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis
			WordSet("hour", "hours").anyWordIn(date) -> cal.apply { add(Calendar.HOUR, -number) }.timeInMillis
			WordSet("minute", "minutes").anyWordIn(date) -> cal.apply { add(Calendar.MINUTE, -number) }.timeInMillis
			WordSet("second").anyWordIn(date) -> cal.apply { add(Calendar.SECOND, -number) }.timeInMillis
			WordSet("month", "months").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
			WordSet("year").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
			WordSet("week").anyWordIn(date) -> cal.apply {
				add(
					Calendar.WEEK_OF_MONTH,
					-number,
				)
			}.timeInMillis

			else -> 0
		}
	}
}
|
||||
@ -0,0 +1,50 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.all
|
||||
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
|
||||
/**
 * Madara-based parser for ero18x.com.
 *
 * Overrides [getChapters] because this theme marks fresh chapters with a
 * "Newly Published!" tag instead of a date.
 */
@MangaSourceParser("ERO18X", "Ero18x", "", ContentType.HENTAI)
internal class Ero18x(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.ERO18X, "ero18x.com", 10) {
	override val datePattern = "MMMM d"
	override val sourceLocale: Locale = Locale.ENGLISH
	// This site serves the chapter list inline, without the Madara AJAX endpoint.
	override val withoutAjax = true

	override suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
		return doc.body().select(selectChapter).mapChapters(reversed = true) { i, li ->
			val a = li.selectFirstOrThrow("a")
			val href = a.attrAsRelativeUrl("href")
			val link = href + stylepage
			// Fresh chapters carry the date in the "new" tag's title attribute.
			val dateText = li.selectFirst("a.c-new-tag")?.attr("title") ?: li.selectFirst(selectDate)?.text()

			val name = a.selectFirst("p")?.text() ?: a.ownText()
			MangaChapter(
				id = generateUid(href),
				name = name,
				number = i + 1,
				url = link,
				// "Newly Published!" has no parseable date; treat it as "today".
				uploadDate = if (dateText == "Newly Published!") {
					parseChapterDate(
						dateFormat,
						"today",
					)
				} else {
					parseChapterDate(
						dateFormat,
						dateText,
					)
				},
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}
}
|
||||
@ -0,0 +1,57 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.all
|
||||
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.Manga
|
||||
import org.koitharu.kotatsu.parsers.model.MangaChapter
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
import org.koitharu.kotatsu.parsers.util.attrAsRelativeUrlOrNull
|
||||
import org.koitharu.kotatsu.parsers.util.generateUid
|
||||
import org.koitharu.kotatsu.parsers.util.mapChapters
|
||||
import org.koitharu.kotatsu.parsers.util.parseFailed
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Locale
|
||||
|
||||
/**
 * Madara-based parser for manhwa-raw.com.
 *
 * Overrides [getChapters] because this theme marks fresh chapters with a
 * "Newly Published!" tag instead of a date.
 */
@MangaSourceParser("MANHWARAW", "Manhwa Raw", "", ContentType.HENTAI)
internal class ManhwaRaw(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.MANHWARAW, "manhwa-raw.com", 10) {
	override val datePattern = "MMMM d"
	override val sourceLocale: Locale = Locale.ENGLISH
	// This site serves the chapter list inline, without the Madara AJAX endpoint.
	override val withoutAjax = true


	override suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
		return doc.body().select(selectChapter).mapChapters(reversed = true) { i, li ->
			// Fix: `a` was kept nullable and later dereferenced without a safe call
			// (`a.selectFirst("p")`), which does not compile. Resolve it up front so
			// every later use is on a non-null element; the failure path is unchanged.
			val a = li.selectFirst("a") ?: li.parseFailed("Link is missing")
			val href = a.attrAsRelativeUrlOrNull("href") ?: li.parseFailed("Link is missing")
			val link = href + stylepage
			// Fresh chapters carry the date in the "new" tag's title attribute.
			val dateText = li.selectFirst("a.c-new-tag")?.attr("title") ?: li.selectFirst(selectDate)?.text()

			val name = a.selectFirst("p")?.text() ?: a.ownText()
			MangaChapter(
				id = generateUid(href),
				name = name,
				number = i + 1,
				url = link,
				// "Newly Published!" has no parseable date; treat it as "today".
				uploadDate = if (dateText == "Newly Published!") {
					parseChapterDate(
						dateFormat,
						"today",
					)
				} else {
					parseChapterDate(
						dateFormat,
						dateText,
					)
				},
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}
}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.en
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
// Madara-based parser for goodgirls.moe (English).
@MangaSourceParser("GOODGIRLS", "GoodGirls", "en")
internal class GoodGirls(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.GOODGIRLS, "goodgirls.moe", 10) {
	// This theme labels the summary block "Synopsis", so the default Madara
	// description selector does not match.
	override val selectDesc = "div.post-content_item:contains(Synopsis) div.summary-content"
}
|
||||
@ -0,0 +1,11 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.en
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
// Madara-based parser for hiperdex.com (English); stock theme, only the
// page size (36) differs from the base defaults.
@MangaSourceParser("HIPERDEX", "HiperDex", "en", ContentType.HENTAI)
internal class HiperDex(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.HIPERDEX, "hiperdex.com", 36)
|
||||
@ -0,0 +1,13 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.en
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
// Madara-based parser for mangaowl.blog (English).
@MangaSourceParser("MANGAOWLBLOG", "MangaOwl Blog (unoriginal)", "en", ContentType.HENTAI)
internal class MangaOwlBlog(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.MANGAOWLBLOG, "mangaowl.blog", 20) {
	// Switches the Madara base class to its POST-request mode (see MadaraParser.postreq).
	override val postreq = true
}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.en
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
// Madara-based parser for manga-raw.info (English).
@MangaSourceParser("MANGARAWINFO", "Manga-Raw Info (unoriginal)", "en")
internal class MangaRawInfo(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.MANGARAWINFO, "manga-raw.info", 20) {
	// Switches the Madara base class to its POST-request mode (see MadaraParser.postreq).
	override val postreq = true
}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.en
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
// Madara-based parser for mangaupdates.top (English).
@MangaSourceParser("MANGAUPDATESTOP", "MangaUpdates Top (unoriginal)", "en")
internal class MangaUpdatesTop(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.MANGAUPDATESTOP, "mangaupdates.top", 10) {
	// Switches the Madara base class to its POST-request mode (see MadaraParser.postreq).
	override val postreq = true
}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.en
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
// Madara-based parser for manhuascan.info (English).
@MangaSourceParser("MANHUASCANINFO", "ManhuaScan Info (unoriginal)", "en")
internal class ManhuaScanInfo(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.MANHUASCANINFO, "manhuascan.info", 10) {
	// Switches the Madara base class to its POST-request mode (see MadaraParser.postreq).
	override val postreq = true
}
|
||||
@ -0,0 +1,13 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.en
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
// Madara-based parser for rackusreads.com (English).
@MangaSourceParser("RACKUSREADS", "RackusReads", "en")
internal class RackusReads(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.RACKUSREADS, "rackusreads.com", 20) {

	// This site prints chapter dates numerically instead of the Madara default.
	override val datePattern = "MM/dd/yyyy"
}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.en
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
// Madara-based parser for 1stkissmanga.blog (English).
@MangaSourceParser("STKISSMANGABLOG", "StkissManga Blog", "en")
internal class StkissMangaBlog(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.STKISSMANGABLOG, "1stkissmanga.blog", 10) {
	// Switches the Madara base class to its POST-request mode (see MadaraParser.postreq).
	override val postreq = true
}
|
||||
@ -0,0 +1,49 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.es
|
||||
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.SimpleDateFormat
|
||||
|
||||
/**
 * "Manhwa Es" — Spanish Madara-based source at manhwa-es.com.
 *
 * Overrides chapter-list parsing because the site marks freshly released
 * chapters with a "¡Recién publicado!" badge instead of a date.
 */
@MangaSourceParser("MANHWA_ES", "Manhwa Es", "es")
internal class ManhwaEs(context: MangaLoaderContext) :
    MadaraParser(context, MangaSource.MANHWA_ES, "manhwa-es.com", 10) {

    // Site is scraped via plain page loads rather than the Madara ajax endpoint.
    override val withoutAjax = true

    // Spanish long dates, e.g. "5 de diciembre".
    override val datePattern = "d 'de' MMMM"

    override suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
        val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
        return doc.body().select(selectChapter).mapChapters(reversed = true) { index, item ->
            val anchor = item.selectFirstOrThrow("a")
            val relativeUrl = anchor.attrAsRelativeUrl("href")
            // New chapters carry a "new" tag whose title holds the date; otherwise read the regular date cell.
            val rawDate = item.selectFirst("a.c-new-tag")?.attr("title") ?: item.selectFirst(selectDate)?.text()
            // "¡Recién publicado!" is the site's "just published" badge — map it to today's date.
            val effectiveDate = if (rawDate == "¡Recién publicado!") "today" else rawDate
            MangaChapter(
                id = generateUid(relativeUrl),
                name = item.selectFirstOrThrow(".mini-letters a").text(),
                number = index + 1,
                url = relativeUrl + stylepage,
                uploadDate = parseChapterDate(dateFormat, effectiveDate),
                source = source,
                scanlator = null,
                branch = null,
            )
        }
    }
}
|
||||
@ -0,0 +1,15 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.es
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
|
||||
/**
 * "UniToonOficial" — Spanish Madara-based source at unitoonoficial.com.
 */
@MangaSourceParser("UNITOONOFICIAL", "UniToonOficial", "es")
internal class UniToonOficial(context: MangaLoaderContext) :
    MadaraParser(context, MangaSource.UNITOONOFICIAL, "unitoonoficial.com") {

    // Site renders chapter dates like "31/01/2023".
    override val datePattern = "dd/MM/yyyy"

    // Genre links on this site live under /generos/ instead of the Madara default.
    override val tagPrefix = "generos/"
}
|
||||
@ -0,0 +1,10 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.fr
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
/**
 * "Readergen" — French Madara-based source at fr.readergen.fr, 18 items per list page.
 */
@MangaSourceParser("READERGEN", "Readergen", "fr")
internal class Readergen(context: MangaLoaderContext) :
    MadaraParser(context, MangaSource.READERGEN, "fr.readergen.fr", 18)
|
||||
@ -0,0 +1,123 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.id
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.util.*
|
||||
|
||||
/**
 * "ManhwaHub" — Indonesian NSFW Madara-based source at manhwahub.net.
 *
 * Fully overrides list-page fetching and tag discovery because the site uses
 * non-standard URLs (`/search?s=`, `?page=N&m_orderby=...`) and two different
 * list-item markups depending on the page.
 */
@MangaSourceParser("MANHWAHUB", "ManhwaHub", "id", ContentType.HENTAI)
internal class ManhwaHub(context: MangaLoaderContext) :
    MadaraParser(context, MangaSource.MANHWAHUB, "manhwahub.net", 40) {

    override val tagPrefix = "genre/"
    // Dates are English even though the content is Indonesian, e.g. "January 5, 2023".
    override val datePattern = "MMMM d, yyyy"
    override val sourceLocale: Locale = Locale.ENGLISH
    override val withoutAjax = true
    // The default "all series" listing is the manhwa genre page.
    override val listUrl = "genre/manhwa"
    override val selectTestAsync = "ul.box-list-chapter"

    init {
        // This site's pagination is 1-based (Madara's default paginators may differ).
        paginator.firstPage = 1
        searchPaginator.firstPage = 1
    }

    /**
     * Fetches one list page. Search, tag filtering and plain listing each use a
     * different URL shape; all of them append `m_orderby=<sort>`.
     */
    override suspend fun getListPage(
        page: Int,
        query: String?,
        tags: Set<MangaTag>?,
        sortOrder: SortOrder,
    ): List<Manga> {
        val tag = tags.oneOrThrowIfMany()
        val url = buildString {
            append("https://")
            append(domain)
            when {
                // Text search: /search?s=<query>&page=N&
                !query.isNullOrEmpty() -> {
                    append("/search?s=")
                    append(query.urlEncoded())
                    append("&page=")
                    append(page)
                    append("&")
                }

                // Genre filter: /genre/<key>?page=N&
                !tags.isNullOrEmpty() -> {
                    append("/$tagPrefix")
                    append(tag?.key.orEmpty())
                    append("?page=")
                    append(page)
                    append("&")
                }

                // Default listing: /genre/manhwa?page=N&
                else -> {
                    append("/$listUrl")
                    append("?page=")
                    append(page)
                    append("&")
                }
            }

            // Sort parameter shared by all three URL shapes above.
            append("m_orderby=")
            when (sortOrder) {
                SortOrder.POPULARITY -> append("views")
                SortOrder.UPDATED -> append("latest")
                SortOrder.NEWEST -> append("new-manga")
                SortOrder.ALPHABETICAL -> append("alphabet")
                SortOrder.RATING -> append("rating")
            }
        }
        val doc = webClient.httpGet(url).parseHtml()

        // Search pages use .c-tabs-item__content rows; listing pages use .page-item-detail cards.
        return doc.select("div.row.c-tabs-item__content").ifEmpty {
            doc.select("div.page-item-detail")
        }.map { div ->
            val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
            // Summary container also differs between the two markups.
            val summary = div.selectFirst(".tab-summary") ?: div.selectFirst(".item-summary")
            Manga(
                id = generateUid(href),
                url = href,
                publicUrl = href.toAbsoluteUrl(div.host ?: domain),
                coverUrl = div.selectFirst("img")?.src().orEmpty(),
                // Title heading tag varies: h3/h4 inside summary, or h5.series-title on cards.
                title = (summary?.selectFirst("h3") ?: summary?.selectFirst("h4")
                    ?: div.selectFirst("h5.series-title"))?.text().orEmpty(),
                altTitle = null,
                // Site rating is out of 5; normalize to 0..1, -1 when absent.
                rating = div.selectFirst("span.total_votes")?.ownText()?.toFloatOrNull()?.div(5f) ?: -1f,
                tags = summary?.selectFirst(".mg_genres")?.select("a")?.mapNotNullToSet { a ->
                    MangaTag(
                        // Tag key is the last path segment of the genre link.
                        key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
                        title = a.text().ifEmpty { return@mapNotNullToSet null }.toTitleCase(),
                        source = source,
                    )
                }.orEmpty(),
                author = summary?.selectFirst(".mg_author")?.selectFirst("a")?.ownText(),
                // `ongoing`/`finished` are word sets inherited from MadaraParser.
                state = when (summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")?.ownText()
                    ?.lowercase()) {
                    in ongoing -> MangaState.ONGOING
                    in finished -> MangaState.FINISHED
                    else -> null
                },
                source = source,
                isNsfw = isNsfwSource,
            )
        }
    }

    /**
     * Scrapes the genre menu from the home page. Entries whose link text is empty
     * fall back to the .menu-image-title caption; entries without either are skipped.
     */
    override suspend fun getTags(): Set<MangaTag> {
        val doc = webClient.httpGet("https://$domain").parseHtml()
        return doc.select("div.genres li").mapNotNullToSet { li ->
            val a = li.selectFirst("a") ?: return@mapNotNullToSet null
            // Key is whatever follows the "genre/" prefix in the link ("" when the prefix is absent).
            val href = a.attr("href").removeSuffix("/").substringAfterLast(tagPrefix, "")
            MangaTag(
                key = href,
                title = a.ownText().trim().ifEmpty {
                    a.selectFirst(".menu-image-title")?.text()?.trim() ?: return@mapNotNullToSet null
                }.toTitleCase(),
                source = source,
            )
        }
    }
}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.it
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
/**
 * "Beyondtheataraxia" — Italian Madara-based source at www.beyondtheataraxia.com.
 */
@MangaSourceParser("BEYONDTHEATARAXIA", "Beyondtheataraxia", "it")
internal class Beyondtheataraxia(context: MangaLoaderContext) :
    MadaraParser(context, MangaSource.BEYONDTHEATARAXIA, "www.beyondtheataraxia.com", 10) {
    // Italian long dates, e.g. "5 dicembre 2019".
    override val datePattern = "d MMMM yyyy"
}
|
||||
@ -0,0 +1,13 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.tr
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
/**
 * "Allied Fansub" — Turkish NSFW Madara-based source at alliedfansub.online.
 */
@MangaSourceParser("ALLIED_FANSUB", "Allied Fansub", "tr", ContentType.HENTAI)
internal class AlliedFansub(context: MangaLoaderContext) :
    MadaraParser(context, MangaSource.ALLIED_FANSUB, "alliedfansub.online", 20) {
    // Site renders chapter dates like "31/01/2023".
    override val datePattern = "dd/MM/yyyy"
}
|
||||
@ -0,0 +1,11 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.tr
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
|
||||
/**
 * "Sarcasm Scans" — Turkish NSFW Madara-based source at sarcasmscans.com, 16 items per list page.
 */
@MangaSourceParser("SARCASMSCANS", "Sarcasm Scans", "tr", ContentType.HENTAI)
internal class SarcasmScans(context: MangaLoaderContext) :
    MadaraParser(context, MangaSource.SARCASMSCANS, "sarcasmscans.com", 16)
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.ar
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
|
||||
/**
 * "AresManga" — Arabic MangaReader-based source at aresmanga.org.
 */
@MangaSourceParser("ARESMANGA", "AresManga", "ar")
internal class AresManga(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.ARESMANGA, "aresmanga.org", pageSize = 20, searchPageSize = 10) {
    // Series index lives under /series instead of the MangaReader default.
    override val listUrl = "/series"
}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.ar
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
|
||||
/**
 * "EnAresManga" — Arabic MangaReader-based source at en-aresmanga.com (sister site of AresManga).
 */
@MangaSourceParser("ENARESMANGA", "EnAresManga", "ar")
internal class EnAresManga(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.ENARESMANGA, "en-aresmanga.com", pageSize = 20, searchPageSize = 10) {
    // Series index lives under /series instead of the MangaReader default.
    override val listUrl = "/series"
}
|
||||
@ -0,0 +1,98 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.ar
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Calendar
|
||||
|
||||
/**
 * "Vex Manga" — Arabic MangaReader-based source at vexmanga.net.
 *
 * Overrides chapter parsing to handle the site's Arabic relative dates
 * ("منذ ..." = "... ago") and its "جديد" ("new") badge on fresh chapters.
 */
@MangaSourceParser("VEXMANGA", "Vex Manga", "ar")
internal class VexManga(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.VEXMANGA, "vexmanga.net", pageSize = 10, searchPageSize = 10) {
    override val selectMangalist = ".listupd .latest-series"
    override val selectChapter = ".ulChapterList > a"

    /**
     * Loads the series page, builds the chapter list (site order is newest-first,
     * hence reversed = true), and delegates the rest of the metadata to parseInfo.
     */
    override suspend fun getDetails(manga: Manga): Manga {
        val docs = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
        val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
        val chapters = docs.select(selectChapter).mapChapters(reversed = true) { index, element ->
            // NOTE(review): selectChapter already matches <a> elements, so selectFirst("a")
            // presumably resolves to the element itself — verify against Jsoup semantics.
            val url = element.selectFirst("a")?.attrAsRelativeUrl("href") ?: return@mapChapters null
            MangaChapter(
                id = generateUid(url),
                name = element.selectFirst(".chapternum")?.text() ?: "Chapter ${index + 1}",
                url = url,
                number = index + 1,
                scanlator = null,
                uploadDate = parseChapterDate(
                    dateFormat,
                    element.selectFirst(".chapterdate")?.text(),
                ),
                branch = null,
                source = source,
            )
        }
        return parseInfo(docs, manga, chapters)
    }

    /**
     * Maps the site's date strings to epoch millis:
     * - "منذ ..." ("... ago") → relative date arithmetic,
     * - "جديد" ("new") → midnight of today,
     * - anything else → parsed with [dateFormat]; 0 when the string is null/unparseable.
     */
    private fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
        val d = date?.lowercase() ?: return 0
        return when {
            d.startsWith("منذ") -> parseRelativeDate(date)
            d.startsWith("جديد") -> Calendar.getInstance().apply {
                set(Calendar.HOUR_OF_DAY, 0)
                set(Calendar.MINUTE, 0)
                set(Calendar.SECOND, 0)
                set(Calendar.MILLISECOND, 0)
            }.timeInMillis

            else -> dateFormat.tryParse(date)
        }
    }

    /**
     * Parses Arabic relative dates like "منذ 3 أيام" ("3 days ago") by subtracting
     * the extracted number from the current time. Units: days, weeks, hours,
     * minutes, seconds, months. Returns 0 when no number or unit is recognized.
     */
    private fun parseRelativeDate(date: String): Long {
        val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
        val cal = Calendar.getInstance()

        return when {
            // "أيام" = days
            WordSet(
                "أيام",
            ).anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis

            // "أسابيع" = weeks
            WordSet(
                "أسابيع",
            ).anyWordIn(date) -> cal.apply { add(Calendar.WEEK_OF_YEAR, -number) }.timeInMillis

            // "ساعة" = hour(s)
            WordSet(
                "ساعة",
            ).anyWordIn(date) -> cal.apply {
                add(
                    Calendar.HOUR,
                    -number,
                )
            }.timeInMillis

            // "دقائق" = minutes
            WordSet(
                "دقائق",
            ).anyWordIn(date) -> cal.apply {
                add(
                    Calendar.MINUTE,
                    -number,
                )
            }.timeInMillis

            // "ثوان" = seconds
            WordSet("ثوان").anyWordIn(date) -> cal.apply {
                add(
                    Calendar.SECOND,
                    -number,
                )
            }.timeInMillis

            // "أشهر" = months
            WordSet("أشهر").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis

            else -> 0
        }
    }
}
|
||||
@ -0,0 +1,10 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.en
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
|
||||
/**
 * "FreakComic" — English MangaReader-based source at freakcomic.com.
 */
@MangaSourceParser("FREAKCOMIC", "FreakComic", "en")
internal class FreakComic(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.FREAKCOMIC, "freakcomic.com", pageSize = 20, searchPageSize = 10)
|
||||
@ -0,0 +1,237 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.en
|
||||
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Calendar
|
||||
|
||||
|
||||
/**
 * "Manhwa Freak" — English MangaReader-based source at manhwa-freak.com.
 *
 * The site does not paginate its main listing (pageSize = 0, everything on one
 * page), so list/tag requests only return results for page 1. Search uses
 * WordPress-style `/page/N/?s=` pagination.
 */
@MangaSourceParser("MANHWA_FREAK", "Manhwa Freak", "en")
internal class ManhwaFreak(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.MANHWA_FREAK, "manhwa-freak.com", pageSize = 0, searchPageSize = 10) {

    override val selectMangalist = ".listupd .lastest-serie"
    override val selectMangaListImg = "img"

    /**
     * Fetches one list page. Three mutually exclusive modes:
     * search (paginated), single tag (page 1 only), plain listing sorted
     * via `?order=` (page 1 only).
     */
    override suspend fun getListPage(
        page: Int,
        query: String?,
        tags: Set<MangaTag>?,
        sortOrder: SortOrder,
    ): List<Manga> {
        if (!query.isNullOrEmpty()) {
            // lastSearchPage caches the highest page seen in the paginator widget.
            if (page > lastSearchPage) {
                return emptyList()
            }

            val url = buildString {
                append("https://")
                append(domain)
                append("/page/")
                append(page)
                append("/?s=")
                append(query.urlEncoded())
            }

            val docs = webClient.httpGet(url).parseHtml()
            // The element before the "next" button holds the last page number.
            lastSearchPage = docs.selectFirst(".pagination .next")
                ?.previousElementSibling()
                ?.text()?.toIntOrNull() ?: 1
            return parseMangaList(docs)
        }

        if (!tags.isNullOrEmpty()) {
            // Genre pages are not paginated on this site.
            if (page > 1) {
                return emptyList()
            }

            val tag = tags.oneOrThrowIfMany()
            val url = buildString {
                append("https://")
                append(domain)
                append("/genres/?genre=")
                append(tag?.key.orEmpty())
            }

            return parseMangaList(webClient.httpGet(url).parseHtml())
        }

        // Plain listing: single page only.
        if (page > 1) {
            return emptyList()
        }
        val sortQuery = when (sortOrder) {
            SortOrder.ALPHABETICAL -> "az"
            SortOrder.NEWEST -> "new"
            SortOrder.POPULARITY -> "views"
            SortOrder.UPDATED -> ""
            else -> ""
        }

        val url = buildString {
            append("https://")
            append(domain)
            append(listUrl)
            append("/?order=")
            append(sortQuery)
        }

        return parseMangaList(webClient.httpGet(url).parseHtml())
    }

    /**
     * Scrapes the genre list from /genres/; tag key is the `?genre=` query value.
     */
    override suspend fun getTags(): Set<MangaTag> {
        val doc = webClient.httpGet("https://$domain/genres/").parseHtml()

        return doc.select("ul.genre-list li a").mapNotNullToSet { a ->
            val href = a.attr("href").substringAfterLast("=")
            MangaTag(
                key = href,
                title = a.text(),
                source = source,
            )
        }
    }

    /**
     * Loads the series page and builds the chapter list (newest-first on the
     * site, hence reversed = true); a "NEW" badge replaces the date for fresh
     * chapters and is mapped to today.
     */
    override suspend fun getDetails(manga: Manga): Manga {
        val docs = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
        val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
        val chapters = docs.select("div.chapter-li a").mapChapters(reversed = true) { index, a ->
            val url = a.attrAsRelativeUrl("href")
            // p.new is the "NEW" badge; otherwise the second <p> holds the date.
            val dateText = a.selectFirst(".chapter-info p.new")?.text() ?: a.select(".chapter-info p")[1].text()
            MangaChapter(
                id = generateUid(url),
                name = a.selectFirst(".chapter-info p:contains(Chapter)")?.text() ?: "Chapter ${index + 1}",
                url = url,
                number = index + 1,
                scanlator = null,
                uploadDate = if (dateText == "NEW") {
                    parseChapterDate(
                        dateFormat,
                        "today",
                    )
                } else {
                    parseChapterDate(
                        dateFormat,
                        dateText,
                    )
                },
                branch = null,
                source = source,
            )
        }
        return parseInfo(docs, manga, chapters)
    }

    /**
     * Parses the #info block of the series page: tags (comma-separated genre
     * names looked up in the shared tag map), state, author, alt title and the
     * #summary description.
     */
    override suspend fun parseInfo(docs: Document, manga: Manga, chapters: List<MangaChapter>): Manga {

        val tagMap = getOrCreateTagMap()
        // NOTE(review): split(",") keeps leading spaces on every entry after the
        // first; lookups succeed only if tagMap keys include that space — verify.
        val selectTag = docs.requireElementById("info").select("div:contains(Genre) > p:last-child").text().split(",")
        val tags = selectTag.mapNotNullToSet { tagMap[it] }

        val mangaState = docs.requireElementById("info").select("div:contains(Status) > p:last-child").text().let {
            when (it) {
                "Ongoing" -> MangaState.ONGOING
                "Completed" -> MangaState.FINISHED
                else -> null
            }
        }
        val author = docs.requireElementById("info").select("div:contains(Author(s)) > p:last-child").text()

        return manga.copy(
            altTitle = docs.requireElementById("info").select("div:contains(Alternative) > p:last-child").text(),
            description = docs.requireElementById("summary").html(),
            state = mangaState,
            author = author,
            isNsfw = manga.isNsfw,
            tags = tags,
            chapters = chapters,
        )
    }

    /**
     * Maps site date strings to epoch millis: relative "... ago" dates,
     * yesterday/today keywords, ordinal dates ("5th December 2019"), or a
     * straight parse with [dateFormat]; 0 for null input.
     */
    protected fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
        // Clean date (e.g. 5th December 2019 to 5 December 2019) before parsing it
        val d = date?.lowercase() ?: return 0
        return when {
            d.endsWith(" ago") -> parseRelativeDate(date)
            // Handle 'yesterday' and 'today', using midnight
            // NOTE(review): the condition checks "year" yet computes yesterday's
            // midnight — looks like it was meant to match "yesterday"; confirm
            // against the site's actual date strings before changing.
            d.startsWith("year") -> Calendar.getInstance().apply {
                add(Calendar.DAY_OF_MONTH, -1) // yesterday
                set(Calendar.HOUR_OF_DAY, 0)
                set(Calendar.MINUTE, 0)
                set(Calendar.SECOND, 0)
                set(Calendar.MILLISECOND, 0)
            }.timeInMillis

            d.startsWith("today") -> Calendar.getInstance().apply {
                set(Calendar.HOUR_OF_DAY, 0)
                set(Calendar.MINUTE, 0)
                set(Calendar.SECOND, 0)
                set(Calendar.MILLISECOND, 0)
            }.timeInMillis

            // Strip ordinal suffixes (1st/2nd/3rd/4th...) word-by-word, then parse.
            date.contains(Regex("""\d(st|nd|rd|th)""")) -> date.split(" ").map {
                if (it.contains(Regex("""\d\D\D"""))) {
                    it.replace(Regex("""\D"""), "")
                } else {
                    it
                }
            }.let { dateFormat.tryParse(it.joinToString(" ")) }

            else -> dateFormat.tryParse(date)
        }
    }

    // Parses dates in this form:
    // 21 hours ago
    private fun parseRelativeDate(date: String): Long {
        val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
        val cal = Calendar.getInstance()

        return when {
            WordSet(
                "day",
                "days",
                "d",
            ).anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis

            WordSet(
                "hour",
                "hours",
                "h",
            ).anyWordIn(date) -> cal.apply {
                add(
                    Calendar.HOUR,
                    -number,
                )
            }.timeInMillis

            WordSet(
                "minute",
                "minutes",
                "mins",
            ).anyWordIn(date) -> cal.apply {
                add(
                    Calendar.MINUTE,
                    -number,
                )
            }.timeInMillis

            WordSet("second").anyWordIn(date) -> cal.apply {
                add(
                    Calendar.SECOND,
                    -number,
                )
            }.timeInMillis

            WordSet("month", "months").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
            WordSet("year").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
            else -> 0
        }
    }
}
|
||||
@ -0,0 +1,10 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.es
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
|
||||
/**
 * "Manga Shiina" — Spanish MangaReader-based source at mangashiina.com.
 */
@MangaSourceParser("MANGASHIINA", "Manga Shiina", "es")
internal class MangaShiina(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.MANGASHIINA, "mangashiina.com", pageSize = 20, searchPageSize = 10)
|
||||
@ -1,10 +0,0 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.fr
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
|
||||
/**
 * "Legacy Scans" — French MangaReader-based source at legacy-scans.com.
 */
@MangaSourceParser("LEGACY_SCANS", "Legacy Scans", "fr")
internal class LegacyScansParser(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.LEGACY_SCANS, "legacy-scans.com", pageSize = 20, searchPageSize = 10)
|
||||
@ -0,0 +1,238 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.en
|
||||
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Calendar
|
||||
import java.util.Locale
|
||||
|
||||
|
||||
/**
 * "Manhwa Freak Fr" — French MangaReader-based source at manhwafreak.fr.
 *
 * NOTE(review): near-identical copy of the English ManhwaFreak parser (only the
 * source id, domain and explicit English locale differ) — consider extracting a
 * shared base class to deduplicate.
 *
 * The site does not paginate its main listing (pageSize = 0); search uses
 * WordPress-style `/page/N/?s=` pagination.
 */
@MangaSourceParser("MANHWA_FREAK_FR", "Manhwa Freak Fr", "fr")
internal class ManhwaFreakFr(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.MANHWA_FREAK_FR, "manhwafreak.fr", pageSize = 0, searchPageSize = 10) {

    override val selectMangalist = ".listupd .lastest-serie"
    override val selectMangaListImg = "img"
    // Dates on the site are formatted in English despite the French content.
    override val sourceLocale: Locale = Locale.ENGLISH

    /**
     * Fetches one list page. Three mutually exclusive modes:
     * search (paginated), single tag (page 1 only), plain listing sorted
     * via `?order=` (page 1 only).
     */
    override suspend fun getListPage(
        page: Int,
        query: String?,
        tags: Set<MangaTag>?,
        sortOrder: SortOrder,
    ): List<Manga> {
        if (!query.isNullOrEmpty()) {
            // lastSearchPage caches the highest page seen in the paginator widget.
            if (page > lastSearchPage) {
                return emptyList()
            }

            val url = buildString {
                append("https://")
                append(domain)
                append("/page/")
                append(page)
                append("/?s=")
                append(query.urlEncoded())
            }

            val docs = webClient.httpGet(url).parseHtml()
            // The element before the "next" button holds the last page number.
            lastSearchPage = docs.selectFirst(".pagination .next")
                ?.previousElementSibling()
                ?.text()?.toIntOrNull() ?: 1
            return parseMangaList(docs)
        }

        if (!tags.isNullOrEmpty()) {
            // Genre pages are not paginated on this site.
            if (page > 1) {
                return emptyList()
            }

            val tag = tags.oneOrThrowIfMany()
            val url = buildString {
                append("https://")
                append(domain)
                append("/genres/?genre=")
                append(tag?.key.orEmpty())
            }

            return parseMangaList(webClient.httpGet(url).parseHtml())
        }

        // Plain listing: single page only.
        if (page > 1) {
            return emptyList()
        }
        val sortQuery = when (sortOrder) {
            SortOrder.ALPHABETICAL -> "az"
            SortOrder.NEWEST -> "new"
            SortOrder.POPULARITY -> "views"
            SortOrder.UPDATED -> ""
            else -> ""
        }

        val url = buildString {
            append("https://")
            append(domain)
            append(listUrl)
            append("/?order=")
            append(sortQuery)
        }

        return parseMangaList(webClient.httpGet(url).parseHtml())
    }

    /**
     * Scrapes the genre list from /genres/; tag key is the `?genre=` query value.
     */
    override suspend fun getTags(): Set<MangaTag> {
        val doc = webClient.httpGet("https://$domain/genres/").parseHtml()

        return doc.select("ul.genre-list li a").mapNotNullToSet { a ->
            val href = a.attr("href").substringAfterLast("=")
            MangaTag(
                key = href,
                title = a.text(),
                source = source,
            )
        }
    }

    /**
     * Loads the series page and builds the chapter list (newest-first on the
     * site, hence reversed = true); a "NEW" badge replaces the date for fresh
     * chapters and is mapped to today.
     */
    override suspend fun getDetails(manga: Manga): Manga {
        val docs = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
        val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
        val chapters = docs.select("div.chapter-li a").mapChapters(reversed = true) { index, a ->
            val url = a.attrAsRelativeUrl("href")
            // p.new is the "NEW" badge; otherwise the second <p> holds the date.
            val dateText = a.selectFirst(".chapter-info p.new")?.text() ?: a.select(".chapter-info p")[1].text()
            MangaChapter(
                id = generateUid(url),
                name = a.selectFirst(".chapter-info p:contains(Chapter)")?.text() ?: "Chapter ${index + 1}",
                url = url,
                number = index + 1,
                scanlator = null,
                uploadDate = if (dateText == "NEW") {
                    parseChapterDate(
                        dateFormat,
                        "today",
                    )
                } else {
                    parseChapterDate(
                        dateFormat,
                        dateText,
                    )
                },
                branch = null,
                source = source,
            )
        }
        return parseInfo(docs, manga, chapters)
    }

    /**
     * Parses the #info block of the series page: tags (comma-separated genre
     * names looked up in the shared tag map), state, author, alt title and the
     * #summary description.
     */
    override suspend fun parseInfo(docs: Document, manga: Manga, chapters: List<MangaChapter>): Manga {

        val tagMap = getOrCreateTagMap()
        // NOTE(review): split(",") keeps leading spaces on every entry after the
        // first; lookups succeed only if tagMap keys include that space — verify.
        val selectTag = docs.requireElementById("info").select("div:contains(Genre) > p:last-child").text().split(",")
        val tags = selectTag.mapNotNullToSet { tagMap[it] }

        val mangaState = docs.requireElementById("info").select("div:contains(Status) > p:last-child").text().let {
            when (it) {
                "Ongoing" -> MangaState.ONGOING
                "Completed" -> MangaState.FINISHED
                else -> null
            }
        }
        val author = docs.requireElementById("info").select("div:contains(Author(s)) > p:last-child").text()

        return manga.copy(
            altTitle = docs.requireElementById("info").select("div:contains(Alternative) > p:last-child").text(),
            description = docs.requireElementById("summary").html(),
            state = mangaState,
            author = author,
            isNsfw = manga.isNsfw,
            tags = tags,
            chapters = chapters,
        )
    }

    /**
     * Maps site date strings to epoch millis: relative "... ago" dates,
     * yesterday/today keywords, ordinal dates ("5th December 2019"), or a
     * straight parse with [dateFormat]; 0 for null input.
     */
    protected fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
        // Clean date (e.g. 5th December 2019 to 5 December 2019) before parsing it
        val d = date?.lowercase() ?: return 0
        return when {
            d.endsWith(" ago") -> parseRelativeDate(date)
            // Handle 'yesterday' and 'today', using midnight
            // NOTE(review): the condition checks "year" yet computes yesterday's
            // midnight — looks like it was meant to match "yesterday"; confirm
            // against the site's actual date strings before changing.
            d.startsWith("year") -> Calendar.getInstance().apply {
                add(Calendar.DAY_OF_MONTH, -1) // yesterday
                set(Calendar.HOUR_OF_DAY, 0)
                set(Calendar.MINUTE, 0)
                set(Calendar.SECOND, 0)
                set(Calendar.MILLISECOND, 0)
            }.timeInMillis

            d.startsWith("today") -> Calendar.getInstance().apply {
                set(Calendar.HOUR_OF_DAY, 0)
                set(Calendar.MINUTE, 0)
                set(Calendar.SECOND, 0)
                set(Calendar.MILLISECOND, 0)
            }.timeInMillis

            // Strip ordinal suffixes (1st/2nd/3rd/4th...) word-by-word, then parse.
            date.contains(Regex("""\d(st|nd|rd|th)""")) -> date.split(" ").map {
                if (it.contains(Regex("""\d\D\D"""))) {
                    it.replace(Regex("""\D"""), "")
                } else {
                    it
                }
            }.let { dateFormat.tryParse(it.joinToString(" ")) }

            else -> dateFormat.tryParse(date)
        }
    }

    // Parses dates in this form:
    // 21 hours ago
    private fun parseRelativeDate(date: String): Long {
        val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
        val cal = Calendar.getInstance()

        return when {
            WordSet(
                "day",
                "days",
                "d",
            ).anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis

            WordSet(
                "hour",
                "hours",
                "h",
            ).anyWordIn(date) -> cal.apply {
                add(
                    Calendar.HOUR,
                    -number,
                )
            }.timeInMillis

            WordSet(
                "minute",
                "minutes",
                "mins",
            ).anyWordIn(date) -> cal.apply {
                add(
                    Calendar.MINUTE,
                    -number,
                )
            }.timeInMillis

            WordSet("second").anyWordIn(date) -> cal.apply {
                add(
                    Calendar.SECOND,
                    -number,
                )
            }.timeInMillis

            WordSet("month", "months").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
            WordSet("year").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
            else -> 0
        }
    }
}
|
||||
@ -0,0 +1,10 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.id
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
|
||||
/**
 * "OmKomik" — Indonesian MangaReader-based source at omkomik.com.
 */
@MangaSourceParser("OMKOMIK", "OmKomik", "id")
internal class OmKomik(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.OMKOMIK, "omkomik.com", pageSize = 20, searchPageSize = 10)
|
||||
@ -0,0 +1,10 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.tr
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
|
||||
/**
 * "Lshistoria" — Turkish MangaReader-based source.
 *
 * NOTE(review): the domain is "omkomik.com", identical to the Indonesian OmKomik
 * parser above — this looks like a copy-paste slip; verify the intended domain
 * for Lshistoria before release.
 */
@MangaSourceParser("LSHISTORIA", "Lshistoria", "tr")
internal class Lshistoria(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.LSHISTORIA, "omkomik.com", pageSize = 20, searchPageSize = 10)
|
||||
@ -0,0 +1,11 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.tr
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
|
||||
/**
 * "MoonDaisyScans" — Turkish NSFW MangaReader-based source at moondaisyscans.com.
 */
@MangaSourceParser("MOONDAISY_SCANS", "MoonDaisyScans", "tr", ContentType.HENTAI)
internal class MoonDaisyScans(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.MOONDAISY_SCANS, "moondaisyscans.com", pageSize = 20, searchPageSize = 10)
|
||||
@ -0,0 +1,11 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.tr
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
|
||||
/**
 * "Summer Toon" — Turkish MangaReader-based source at summertoon.com, 10 items per list page.
 */
@MangaSourceParser("SUMMERTOON", "Summer Toon", "tr")
internal class SummerToon(context: MangaLoaderContext) :
    MangaReaderParser(context, MangaSource.SUMMERTOON, "summertoon.com", pageSize = 10, searchPageSize = 10)
|
||||
|
||||
Loading…
Reference in New Issue