Merge pull request #220 from davvarrr/master
fix some source and add source
commit
42cc0430f8
@ -0,0 +1,215 @@
|
||||
package org.koitharu.kotatsu.parsers.site.fr
|
||||
|
||||
import okhttp3.Headers
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.network.UserAgents
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.Calendar
|
||||
import java.util.EnumSet
|
||||
import java.util.Locale
|
||||
|
||||
@MangaSourceParser("LUGNICASCANS", "Lugnica Scans", "fr")
internal class LugnicaScans(context: MangaLoaderContext) : PagedMangaParser(context, MangaSource.LUGNICASCANS, 10) {

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.ALPHABETICAL,
		SortOrder.UPDATED,
	)

	override val configKeyDomain = ConfigKey.Domain("lugnica-scans.com")

	override val headers: Headers = Headers.Builder()
		.add("User-Agent", UserAgents.CHROME_DESKTOP)
		.build()

	init {
		// Force the "continue" reader mode via cookie so getPages sees all images in one document.
		context.cookieJar.insertCookies(
			domain,
			"reader_render=continue;",
		)
	}

	override suspend fun getFavicons(): Favicons {
		return Favicons(
			listOf(
				Favicon("https://$domain/favicon/favicon-32x32.png", 32, null),
			),
			domain,
		)
	}

	/**
	 * Loads one catalog page.
	 *
	 * UPDATED uses the site's "last chapters" endpoint (paged); ALPHABETICAL uses the A-Z
	 * catalog page, which lists every title on a single page — so any page after the first
	 * returns an empty list to stop pagination (the previous code requested a bogus
	 * "/mangas/2" URL to achieve the same effect).
	 * [query] and [tags] are ignored: the site offers no search or tag filtering.
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		if (sortOrder == SortOrder.ALPHABETICAL && page > 1) {
			return emptyList()
		}
		val url = buildString {
			append("https://")
			append(domain)
			if (sortOrder == SortOrder.UPDATED) {
				append("/api/manga/home/getlast/")
				append(page)
			} else {
				append("/mangas/")
			}
		}
		val doc = webClient.httpGet(url).parseHtml()
		return if (sortOrder == SortOrder.UPDATED) {
			doc.select(".last_chapters-element").map { div ->
				val a = div.selectFirstOrThrow("a.last_chapters-title")
				val href = a.attrAsAbsoluteUrl("href")
				Manga(
					id = generateUid(href),
					title = a.text(),
					altTitle = null,
					url = href,
					publicUrl = href.toAbsoluteUrl(domain),
					// Site rates out of 5; normalize to 0..1, -1 when absent or unparseable.
					rating = div.selectFirstOrThrow(".last_chapters-rate").ownText().toFloatOrNull()?.div(5f)
						?: -1f,
					isNsfw = false,
					coverUrl = div.selectFirstOrThrow(".last_chapters-image img").attrAsAbsoluteUrl("src"),
					tags = setOf(),
					state = null,
					author = null,
					source = source,
				)
			}
		} else {
			val root = doc.selectFirstOrThrow(".catalog")
			root.select("div.element").map { div ->
				val href = div.selectFirstOrThrow("a").attrAsAbsoluteUrl("href")
				Manga(
					id = generateUid(href),
					title = div.select("a.title").text(),
					altTitle = null,
					url = href,
					publicUrl = href.toAbsoluteUrl(domain),
					rating = div.selectFirstOrThrow("div.stats").lastElementChild()?.ownText()?.toFloatOrNull()
						?.div(5f) ?: -1f,
					isNsfw = false,
					coverUrl = div.selectFirstOrThrow("img").attrAsAbsoluteUrl("src"),
					tags = setOf(),
					state = null,
					author = null,
					source = source,
				)
			}
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val root = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
		val dateFormat = SimpleDateFormat("dd-MM-yyyy", Locale.FRANCE)
		return manga.copy(
			altTitle = null,
			// The 4th "manga-tags" row holds the publication status; getOrNull avoids a crash
			// (previously an IndexOutOfBoundsException) when the layout changes.
			state = when (root.select("div.manga-tags").getOrNull(3)?.select("a")?.text()) {
				"En Cours" -> MangaState.ONGOING
				"Fini", "Abandonné", "Licencier" -> MangaState.FINISHED
				else -> null
			},
			// There is no tag search on the site, so the display text doubles as the tag key.
			tags = root.select("div.manga-tags").getOrNull(1)?.select("a")?.mapNotNullToSet { a ->
				MangaTag(
					key = a.text(),
					title = a.text().toTitleCase(),
					source = source,
				)
			}.orEmpty(),
			author = root.select("div.manga-staff").text(),
			description = root.selectFirst("div.manga-description div")?.text(),
			chapters = root.select("div.manga-chapters_wrapper div.manga-chapter")
				.mapChapters(reversed = true) { i, div ->
					val a = div.selectFirstOrThrow("a")
					val href = a.attrAsRelativeUrl("href")
					// Last <span> carries either an absolute "dd-MM-yyyy" date or a relative
					// French phrase ("il y a …").
					val dateText = div.select("span").last()?.text()
					MangaChapter(
						id = generateUid(href),
						name = a.text(),
						number = i,
						url = href,
						scanlator = null,
						uploadDate = parseChapterDate(dateFormat, dateText),
						branch = null,
						source = source,
					)
				},
		)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		// requireElementById already throws when the element is missing; the previous
		// "?: throw ParseException(...)" branch was unreachable and has been removed.
		val root = doc.body().requireElementById("forgen_reader")
		return root.select("img").map { img ->
			val url = img.attrAsRelativeUrlOrNull("data-src") ?: img.attrAsRelativeUrlOrNull("src")
			?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}

	override suspend fun getTags(): Set<MangaTag> = emptySet()

	// Was "protected", which is meaningless in this final class; narrowed to private.
	private fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
		val d = date?.lowercase() ?: return 0
		return when {
			// French relative dates start with "il y a" ("ago"), e.g. "il y a 3 jours".
			d.startsWith("il y a") -> parseRelativeDate(date)
			else -> dateFormat.tryParse(date)
		}
	}

	// Converts a French relative date ("il y a N jours/heures/…") to epoch millis,
	// or 0 when no number / no known unit is found.
	private fun parseRelativeDate(date: String): Long {
		val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
		val cal = Calendar.getInstance()
		return when {
			WordSet("jour", "jours").anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis
			WordSet("heure", "heures").anyWordIn(date) -> cal.apply { add(Calendar.HOUR, -number) }.timeInMillis
			WordSet("minute", "minutes").anyWordIn(date) -> cal.apply { add(Calendar.MINUTE, -number) }.timeInMillis
			WordSet("seconde", "secondes").anyWordIn(date) -> cal.apply { add(Calendar.SECOND, -number) }.timeInMillis
			WordSet("mois").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
			WordSet("année", "années").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
			WordSet("semaine", "semaines").anyWordIn(date) -> cal.apply {
				add(Calendar.WEEK_OF_MONTH, -number)
			}.timeInMillis
			else -> 0
		}
	}
}
|
||||
@ -0,0 +1,15 @@
|
||||
package org.koitharu.kotatsu.parsers.site.madara.id
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.madara.MadaraParser
|
||||
import java.util.Locale
|
||||
|
||||
@MangaSourceParser("IMMORTALUPDATESID", "Immortal Updates Id", "id")
internal class ImmortalUpdatesId(context: MangaLoaderContext) :
	MadaraParser(context, MangaSource.IMMORTALUPDATESID, "immortalupdates.id") {

	// Chapter dates follow "d MMMM yyyy" and use English month names,
	// despite the site's Indonesian locale.
	override val datePattern = "d MMMM yyyy"
	override val sourceLocale: Locale = Locale.ENGLISH
}
|
||||
@ -0,0 +1,70 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.en
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.Manga
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.model.MangaTag
|
||||
import org.koitharu.kotatsu.parsers.model.SortOrder
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
import org.koitharu.kotatsu.parsers.util.domain
|
||||
import org.koitharu.kotatsu.parsers.util.parseHtml
|
||||
import org.koitharu.kotatsu.parsers.util.urlEncoded
|
||||
import java.util.EnumSet
|
||||
|
||||
@MangaSourceParser("ZAHARD", "Zahard", "en")
internal class Zahard(context: MangaLoaderContext) :
	MangaReaderParser(context, MangaSource.ZAHARD, "zahard.xyz", pageSize = 20, searchPageSize = 30) {

	override val listUrl = "/library"
	override val selectChapter = "#chapterlist > ul > a"
	override val selectPage = "div#chapter_imgs img"

	// The site exposes a single ordering.
	override val sortOrders: Set<SortOrder>
		get() = EnumSet.of(SortOrder.NEWEST)

	// Highest page number seen in the search paginator; used to stop paging search results.
	private var lastSearchPage = 1

	/**
	 * Fetches one page of the library, either filtered by [query] (search mode,
	 * capped at [lastSearchPage]) or by [tags] (plain listing mode).
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		if (query.isNullOrEmpty()) {
			// Listing mode: optional repeated "tag=<key>" parameters.
			val tagKey = "tag".urlEncoded()
			val tagQuery = if (tags.isNullOrEmpty()) {
				""
			} else {
				tags.joinToString(separator = "&", prefix = "&") { "$tagKey=${it.key}" }
			}
			val listPageUrl = "https://$domain$listUrl?page=$page$tagQuery"
			return parseMangaList(webClient.httpGet(listPageUrl).parseHtml())
		}
		// Search mode: stop once we are past the last known paginator page.
		if (page > lastSearchPage) {
			return emptyList()
		}
		val searchUrl = "https://$domain$listUrl?search=${query.urlEncoded()}&page=$page"
		val doc = webClient.httpGet(searchUrl).parseHtml()
		// The element just before the "next" link holds the last page number.
		lastSearchPage = doc.selectFirst("a[rel=next]")
			?.previousElementSibling()
			?.text()
			?.toIntOrNull() ?: 1
		return parseMangaList(doc)
	}
}
|
||||
@ -0,0 +1,14 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mangareader.id
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mangareader.MangaReaderParser
|
||||
|
||||
@MangaSourceParser("MANGAKYO", "Mangakyo", "id")
internal class Mangakyo(context: MangaLoaderContext) :
	MangaReaderParser(context, MangaSource.MANGAKYO, "mangakyo.org", pageSize = 40, searchPageSize = 20) {

	// The catalog lives under /komik instead of the MangaReader default list path.
	override val listUrl = "/komik"

	// Chapter dates look like "Jan 5, 2023".
	override val datePattern = "MMM d, yyyy"
}
|
||||
@ -0,0 +1,166 @@
|
||||
package org.koitharu.kotatsu.parsers.site.vi
|
||||
|
||||
import okhttp3.Headers
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.network.UserAgents
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.EnumSet
|
||||
import java.util.Locale
|
||||
|
||||
@MangaSourceParser("LXMANGA", "Lx Manga", "vi")
internal class LxManga(context: MangaLoaderContext) : PagedMangaParser(context, MangaSource.LXMANGA, 60) {

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.ALPHABETICAL,
		SortOrder.UPDATED,
		SortOrder.NEWEST,
		SortOrder.POPULARITY,
	)

	override val configKeyDomain = ConfigKey.Domain("lxmanga.net")

	override val headers: Headers = Headers.Builder()
		.add("User-Agent", UserAgents.CHROME_DESKTOP)
		.build()

	/**
	 * Fetches one catalog page. Search ([query]), tag filtering ([tags]) and the plain
	 * list each use a different path; "page" and "sort" are appended as regular query
	 * parameters in every mode.
	 *
	 * Fix: the previous code opened the query string twice in search mode
	 * ("/tim-kiem?filter…?page=…"), producing a malformed URL, and percent-encoded the
	 * whole "filter[name]=" string, '=' separator included. Now the query string is
	 * opened exactly once and only the parameter name/value are encoded.
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		val url = buildString {
			append("https://")
			append(domain)
			when {
				!query.isNullOrEmpty() -> {
					append("/tim-kiem?")
					append("filter[name]".urlEncoded())
					append('=')
					append(query.urlEncoded())
					append('&')
				}

				!tags.isNullOrEmpty() -> {
					append("/the-loai/")
					for (tag in tags) {
						append(tag.key)
					}
					append('?')
				}

				else -> append("/danh-sach?")
			}
			append("page=")
			append(page)
			append("&sort=")
			append(
				when (sortOrder) {
					SortOrder.POPULARITY -> "-views"
					SortOrder.NEWEST -> "-created_at"
					SortOrder.ALPHABETICAL -> "name"
					// UPDATED and any future orders fall back to recency.
					else -> "-updated_at"
				},
			)
		}
		val doc = webClient.httpGet(url).parseHtml()
		return doc.select("div.grid div.manga-vertical").map { div ->
			val href = div.selectFirstOrThrow("a").attr("href")
			// The cover is a CSS background-image: extract the url('…') payload.
			val cover = div.selectFirstOrThrow(".cover").attr("style")
				.substringAfter("url('")
				.substringBefore("')")
			Manga(
				id = generateUid(href),
				title = div.selectFirstOrThrow("a.text-ellipsis").text(),
				altTitle = null,
				url = href,
				publicUrl = href.toAbsoluteUrl(domain),
				rating = RATING_UNKNOWN,
				isNsfw = true,
				coverUrl = cover,
				tags = setOf(),
				state = null,
				author = null,
				source = source,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val root = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
		val dateFormat = SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US)
		return manga.copy(
			// NOTE(review): French selector text on a Vietnamese site — presumably carried
			// over from another parser; verify against the live page.
			altTitle = root.select(".divider2:contains(Noms associés :)").firstOrNull()?.text(),
			// selectFirst + safe call replaces the previous .first()!!, which threw an NPE
			// whenever the status element was missing; now it degrades to state = null.
			state = when (root.selectFirst("div.grow div.mt-2:contains(Tình trạng) a")?.text()) {
				"Đang tiến hành" -> MangaState.ONGOING
				"Đã hoàn thành" -> MangaState.FINISHED
				else -> null
			},
			tags = root.select("div.grow div.mt-2:contains(Thể loại) span a").mapNotNullToSet { a ->
				MangaTag(
					// Tag key is the last path segment of the tag link.
					key = a.attr("href").removeSuffix("/").substringAfterLast('/'),
					title = a.text().toTitleCase(),
					source = source,
				)
			},
			author = root.select("div.grow div.mt-2:contains(Tác giả) span a")
				.joinToString { it.text().trim(',', ' ') },
			description = root.selectFirst("div.py-4.border-t")?.html(),
			chapters = root.select("ul.overflow-y-auto.overflow-x-hidden a")
				.mapChapters(reversed = true) { i, a ->
					val href = a.attr("href")
					MangaChapter(
						id = generateUid(href),
						name = a.selectFirstOrThrow("span.text-ellipsis").text(),
						number = i,
						url = href,
						scanlator = null,
						// The timeago element carries a machine-readable datetime attribute.
						uploadDate = dateFormat.tryParse(a.selectFirstOrThrow("span.timeago").attr("datetime")),
						branch = null,
						source = source,
					)
				},
		)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()
		return doc.select("div.text-center img.lazy").map { img ->
			// Lazy-loaded images keep the real source in data-src; fall back to src.
			val url = img.attrAsRelativeUrlOrNull("data-src") ?: img.attrAsRelativeUrlOrNull("src")
			?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}

	override suspend fun getTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain/").parseHtml()
		return doc.body().select("ul.absolute.w-full a").mapToSet { a ->
			MangaTag(
				key = a.attr("href").removeSuffix("/").substringAfterLast('/'),
				title = a.selectFirstOrThrow("span.text-ellipsis").text(),
				source = source,
			)
		}
	}
}
|
||||
Loading…
Reference in New Issue