add zmanga and sources
parent
fefec4985d
commit
cd0d4b103a
@ -0,0 +1,193 @@
|
||||
package org.koitharu.kotatsu.parsers.site.fr
|
||||
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import okhttp3.Headers
|
||||
import org.json.JSONArray
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.network.UserAgents
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.util.*
|
||||
|
||||
/**
 * Parser for scansmangas.me (French).
 *
 * The site serves its whole catalogue / search results on a single page,
 * so this parser declares a huge page size and deliberately requests a
 * non-existent path for any page after the first to stop pagination.
 */
@MangaSourceParser("SCANS_MANGAS_ME", "Scans Mangas Me", "fr")
internal class ScansMangasMe(context: MangaLoaderContext) :
	PagedMangaParser(context, MangaSource.SCANS_MANGAS_ME, 1000000) {

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.ALPHABETICAL,
		SortOrder.UPDATED,
		SortOrder.NEWEST,
		SortOrder.POPULARITY,
	)

	override val configKeyDomain = ConfigKey.Domain("scansmangas.me")

	override val headers: Headers = Headers.Builder()
		.add("User-Agent", UserAgents.CHROME_DESKTOP)
		.build()

	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		val url = buildString {
			append("https://")
			append(domain)
			if (page == 1) {
				when {
					!query.isNullOrEmpty() -> {
						append("/?s=")
						append(query.urlEncoded())
						append("&post_type=manga")
					}

					!tags.isNullOrEmpty() -> {
						append("/genres/")
						for (tag in tags) {
							append(tag.key)
						}
					}

					else -> {
						append("/tous-nos-mangas/?order=")
						append(
							when (sortOrder) {
								SortOrder.POPULARITY -> "popular"
								SortOrder.UPDATED -> "update"
								SortOrder.ALPHABETICAL -> "title"
								SortOrder.NEWEST -> "create"
								else -> "update"
							},
						)
					}
				}
			} else {
				// Listings are not paginated; request an invalid path so that
				// pages > 1 yield no (duplicate) results.
				append("/stop")
			}
		}

		val doc = webClient.httpGet(url).parseHtml()

		return doc.select("div.postbody .bs .bsx").map { div ->
			val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
			Manga(
				id = generateUid(href),
				url = href,
				publicUrl = href.toAbsoluteUrl(div.host ?: domain),
				coverUrl = div.selectFirst("img")?.src().orEmpty(),
				// Element.text() never returns null, so no orEmpty() is needed
				title = div.selectFirstOrThrow("div.bigor div.tt").text(),
				altTitle = null,
				// Ratings are on a 0..10 scale; normalize to 0..1
				rating = div.selectFirstOrThrow("div.rating i").ownText().toFloatOrNull()?.div(10f)
					?: RATING_UNKNOWN,
				tags = emptySet(),
				author = null,
				state = null,
				source = source,
				isNsfw = isNsfwSource,
			)
		}
	}

	/**
	 * Fetches the set of genre tags from the full catalogue page.
	 * The tag key is the last path segment of the genre link.
	 */
	override suspend fun getTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain/tous-nos-mangas/").parseHtml()
		return doc.select("ul.genre li").mapNotNullToSet { li ->
			val a = li.selectFirstOrThrow("a")
			MangaTag(
				key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
				title = a.text(),
				source = source,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()

		// "En cours" = "ongoing"; when that badge is absent the manga is
		// treated as finished
		val state = if (doc.select("div.spe span:contains(En cours)").isEmpty()) {
			MangaState.FINISHED
		} else {
			MangaState.ONGOING
		}

		// The author is expected as the third <span> of the info block;
		// getOrNull avoids an IndexOutOfBoundsException on shorter blocks
		val author = doc.select("div.spe span").getOrNull(2)
			?.text()
			?.replace("Auteur:", "")
			?.trim()

		return manga.copy(
			tags = doc.select("div.spe span:contains(Genres) a").mapNotNullToSet { a ->
				MangaTag(
					key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
					title = a.text().toTitleCase(),
					source = source,
				)
			},
			description = doc.selectFirstOrThrow("div.desc").html(),
			altTitle = doc.body().select("div.infox span.alter").text(),
			author = author,
			state = state,
			chapters = getChapters(doc),
		)
	}

	/**
	 * Parses the chapter list from a details page.
	 * reversed = true because the site lists chapters newest-first — TODO confirm.
	 */
	private fun getChapters(doc: Document): List<MangaChapter> {
		return doc.body().select("ul#chapter_list li").mapChapters(reversed = true) { i, li ->
			val href = li.selectFirstOrThrow("a").attrAsRelativeUrl("href")
			MangaChapter(
				id = generateUid(href),
				// NOTE(review): "span.mobile chapter" matches <chapter> elements
				// nested inside span.mobile — possibly meant "span.mobile-chapter".
				// Kept as-is; verify against live markup.
				name = li.selectFirstOrThrow("span.mobile chapter").text(),
				number = i + 1,
				url = href,
				uploadDate = 0,
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()

		// The page list is embedded in a script as a JS array: `var pages = [...];`
		val script = doc.selectFirstOrThrow("script:containsData(page_image)")
		val images = JSONArray(script.data().substringAfterLast("var pages = ").substringBefore(';'))

		return List(images.length()) { i ->
			val pageUrl = images.getJSONObject(i).getString("page_image")
			MangaPage(
				id = generateUid(pageUrl),
				url = pageUrl,
				preview = null,
				source = source,
			)
		}
	}

	/**
	 * Resolves an image URL, preferring lazy-load attributes over plain src.
	 * Returns null when none of the attributes yields a URL.
	 */
	private fun Element.src(): String? {
		var result = absUrl("data-src")
		if (result.isEmpty()) result = absUrl("data-cfsrc")
		if (result.isEmpty()) result = absUrl("src")
		return result.ifEmpty { null }
	}
}
|
||||
@ -0,0 +1,21 @@
|
||||
package org.koitharu.kotatsu.parsers.site.mmrcms.fr
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.mmrcms.MmrcmsParser
|
||||
import java.util.Locale
|
||||
|
||||
|
||||
/**
 * "Scan Manga Vf" (scanmanga-vf.ws) — French source built on the shared
 * MMRCMS engine; only site-specific selectors and settings are overridden.
 */
@MangaSourceParser("SCANMANGAVF_WS", "Scan Manga Vf Ws", "fr")
internal class ScanMangaVfWs(context: MangaLoaderContext) :
	MmrcmsParser(context, MangaSource.SCANMANGAVF_WS, "scanmanga-vf.ws") {

	// Presumably the file extension of updated cover images — TODO confirm
	// against MmrcmsParser's usage of this value
	override val imgUpdated = ".jpg"

	// Detail-page selectors: "Genres" and "Appelé aussi" ("also known as")
	override val selectTag = "dt:contains(Genres)"
	override val selectAlt = "dt:contains(Appelé aussi)"

	// NOTE(review): ENGLISH locale on a French site — presumably dates are
	// rendered in English here; verify against live pages
	override val sourceLocale: Locale = Locale.ENGLISH
}
|
||||
@ -0,0 +1,313 @@
|
||||
package org.koitharu.kotatsu.parsers.site.zmanga
|
||||
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.PagedMangaParser
|
||||
import org.koitharu.kotatsu.parsers.config.ConfigKey
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
/**
 * Base parser for sites running the "ZManga" WordPress theme.
 *
 * Concrete sources subclass this and override the URL pieces and CSS
 * selectors ([listUrl], [selectDesc], [selectChapter], ...) where a site
 * deviates from the default markup.
 */
internal abstract class ZMangaParser(
	context: MangaLoaderContext,
	source: MangaSource,
	domain: String,
	pageSize: Int = 16,
) : PagedMangaParser(context, source, pageSize) {

	override val configKeyDomain = ConfigKey.Domain(domain)

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.UPDATED,
		SortOrder.POPULARITY,
		SortOrder.ALPHABETICAL,
		SortOrder.NEWEST,
		SortOrder.RATING,
	)

	// Path of the browse/search endpoint, relative to the domain root
	protected open val listUrl = "advanced-search/"

	// Pattern for absolute chapter dates, used by parseChapterDate()
	protected open val datePattern = "MMMM d, yyyy"

	init {
		// The theme numbers list pages starting from 1
		paginator.firstPage = 1
		searchPaginator.firstPage = 1
	}

	// Status labels mapped to MangaState.ONGOING in getDetails()
	@JvmField
	protected val ongoing: Set<String> = setOf(
		"On Going",
		"Ongoing",
	)

	// Status labels mapped to MangaState.FINISHED in getDetails()
	@JvmField
	protected val finished: Set<String> = setOf(
		"Completed",
	)

	/**
	 * Loads one page of the catalogue. Pagination uses /page/N/ path
	 * segments; sort order, title query and genre filters are passed as
	 * query parameters of the advanced-search endpoint.
	 */
	override suspend fun getListPage(
		page: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder,
	): List<Manga> {
		val url = buildString {
			append("https://")
			append(domain)
			append("/$listUrl")
			if (page > 1) {
				append("page/")
				append(page.toString())
				append("/")
			}

			append("?order=")
			when (sortOrder) {
				SortOrder.POPULARITY -> append("popular")
				SortOrder.UPDATED -> append("update")
				SortOrder.ALPHABETICAL -> append("title")
				SortOrder.NEWEST -> append("latest")
				SortOrder.RATING -> append("rating")
			}
			if (!query.isNullOrEmpty()) {
				append("&title=")
				append(query.urlEncoded())
			}

			if (!tags.isNullOrEmpty()) {
				// Genres are sent as repeated genre[]=<key> parameters
				for (tag in tags) {
					append("&")
					append("genre[]".urlEncoded())
					append("=")
					append(tag.key)
				}
			}
		}

		val doc = webClient.httpGet(url).parseHtml()

		return doc.select("div.flexbox2-item").map { div ->
			val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href")
			Manga(
				id = generateUid(href),
				url = href,
				publicUrl = href.toAbsoluteUrl(div.host ?: domain),
				coverUrl = div.selectFirst("img")?.src().orEmpty(),
				title = div.selectFirstOrThrow("div.flexbox2-title span:not(.studio)").text().orEmpty(),
				altTitle = null,
				// Ratings are on a 0..10 scale; normalize to 0..1
				rating = div.selectFirstOrThrow("div.info div.score").ownText().toFloatOrNull()?.div(10f)
					?: RATING_UNKNOWN,
				// NOTE(review): this selects "div.genres a" from the whole
				// document (doc), not from the current item (div), so every
				// manga on the page gets the same tag set — and the key is the
				// anchor's CSS class, unlike the href-derived keys used
				// elsewhere. Looks unintentional; verify against site markup.
				tags = doc.body().select("div.genres a").mapNotNullToSet { span ->
					MangaTag(
						key = span.attr("class"),
						title = span.text().toTitleCase(),
						source = source,
					)
				},
				author = null,
				state = null,
				source = source,
				isNsfw = isNsfwSource,
			)
		}
	}

	/**
	 * Reads the genre checkboxes from the advanced-search form.
	 * The checkbox value attribute is used as the tag key.
	 */
	override suspend fun getTags(): Set<MangaTag> {
		val doc = webClient.httpGet("https://$domain/$listUrl").parseHtml()
		return doc.select("tr.gnrx div.custom-control").mapNotNullToSet { checkbox ->
			// NOTE(review): Jsoup attr() never returns null (it returns ""),
			// so this elvis fallback is dead code
			val key = checkbox.selectFirstOrThrow("input").attr("value") ?: return@mapNotNullToSet null
			val name = checkbox.selectFirstOrThrow("label").text()
			MangaTag(
				key = key,
				title = name,
				source = source,
			)
		}
	}

	// Detail-page selectors, overridable per site
	protected open val selectDesc = "div.series-synops"
	protected open val selectState = "span.status"
	protected open val selectAlt = "div.series-infolist li:contains(Alt) span"
	protected open val selectAut = "div.series-infolist li:contains(Author) span"
	protected open val selectTag = "div.series-genres a"

	/**
	 * Fetches the details page and fills in description, status, author,
	 * tags and chapters. The chapter list is parsed concurrently with the
	 * rest of the page via async.
	 */
	override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
		val fullUrl = manga.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		val chaptersDeferred = async { getChapters(manga, doc) }

		val desc = doc.selectFirstOrThrow(selectDesc).html()

		val stateDiv = doc.selectFirst(selectState)

		// Map the status label through the ongoing/finished word sets;
		// unknown labels leave the state as null
		val state = stateDiv?.let {
			when (it.text()) {
				in ongoing -> MangaState.ONGOING
				in finished -> MangaState.FINISHED
				else -> null
			}
		}

		val alt = doc.body().select(selectAlt).text()

		val aut = doc.body().select(selectAut).text()

		// Presence of the adult-content warning element marks the manga NSFW
		val nsfw = doc.getElementById("adt-warning") != null

		manga.copy(
			tags = doc.body().select(selectTag).mapNotNullToSet { a ->
				MangaTag(
					key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
					title = a.text().toTitleCase().replace(",", ""),
					source = source,
				)
			},
			description = desc,
			altTitle = alt,
			author = aut,
			state = state,
			chapters = chaptersDeferred.await(),
			isNsfw = nsfw || manga.isNsfw,
		)
	}

	// Chapter-list selectors, overridable per site
	protected open val selectDate = "span.date"
	protected open val selectChapter = "ul.series-chapterlist li"

	/**
	 * Parses the chapter list from an already-fetched details page.
	 * reversed = true because the site lists chapters newest-first — TODO confirm.
	 */
	protected open suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		val dateFormat = SimpleDateFormat(datePattern, sourceLocale)
		return doc.body().select(selectChapter).mapChapters(reversed = true) { i, li ->
			val a = li.selectFirstOrThrow("a")
			val href = a.attrAsRelativeUrl("href")
			val dateText = li.selectFirst(selectDate)?.text()
			MangaChapter(
				id = generateUid(href),
				name = li.selectFirstOrThrow(".flexch-infoz span:not(.date)").text(),
				number = i + 1,
				url = href,
				uploadDate = parseChapterDate(
					dateFormat,
					dateText,
				),
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}

	// Selector for the page images of a chapter, overridable per site
	protected open val selectPage = "div.reader-area img"

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.toAbsoluteUrl(domain)
		val doc = webClient.httpGet(fullUrl).parseHtml()

		return doc.select(selectPage).map { img ->
			val url = img.src()?.toRelativeUrl(domain) ?: img.parseFailed("Image src not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				source = source,
			)
		}
	}

	/**
	 * Resolves an image URL, preferring lazy-load attributes over plain src.
	 * Returns null when none of the attributes yields a URL.
	 */
	protected fun Element.src(): String? {
		var result = absUrl("data-src")
		if (result.isEmpty()) result = absUrl("data-cfsrc")
		if (result.isEmpty()) result = absUrl("src")
		return result.ifEmpty { null }
	}

	/**
	 * Parses a chapter date, handling relative forms ("21 hours ago"),
	 * "today"-style labels and ordinal suffixes, falling back to the
	 * supplied [dateFormat]. Returns 0 when the date is null/unparseable.
	 */
	protected fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
		// Clean date (e.g. 5th December 2019 to 5 December 2019) before parsing it
		val d = date?.lowercase() ?: return 0
		return when {
			d.endsWith(" ago") ||
				// short Hours
				d.endsWith(" h") ||
				// short Day
				d.endsWith(" d") -> parseRelativeDate(date)

			// Handle 'yesterday' and 'today', using midnight
			// NOTE(review): the comment says 'yesterday' but the check is
			// startsWith("year") — "yesterday" does not start with "year".
			// Kept as-is; verify which label the site actually emits.
			d.startsWith("year") -> Calendar.getInstance().apply {
				add(Calendar.DAY_OF_MONTH, -1) // yesterday
				set(Calendar.HOUR_OF_DAY, 0)
				set(Calendar.MINUTE, 0)
				set(Calendar.SECOND, 0)
				set(Calendar.MILLISECOND, 0)
			}.timeInMillis

			d.startsWith("today") -> Calendar.getInstance().apply {
				set(Calendar.HOUR_OF_DAY, 0)
				set(Calendar.MINUTE, 0)
				set(Calendar.SECOND, 0)
				set(Calendar.MILLISECOND, 0)
			}.timeInMillis

			// Strip ordinal suffixes (1st, 2nd, 3rd, 4th, ...) word by word
			date.contains(Regex("""\d(st|nd|rd|th)""")) -> date.split(" ").map {
				if (it.contains(Regex("""\d\D\D"""))) {
					it.replace(Regex("""\D"""), "")
				} else {
					it
				}
			}.let { dateFormat.tryParse(it.joinToString(" ")) }

			else -> dateFormat.tryParse(date)
		}
	}

	// Parses dates in this form:
	// 21 hours ago
	// Returns 0 when no number is found or no known unit word matches.
	private fun parseRelativeDate(date: String): Long {
		val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
		val cal = Calendar.getInstance()

		return when {
			WordSet(
				"day",
				"days",
			).anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis

			WordSet("hour", "hours", "h").anyWordIn(date) -> cal.apply {
				add(
					Calendar.HOUR,
					-number,
				)
			}.timeInMillis

			WordSet(
				"min",
				"minute",
				"minutes",
			).anyWordIn(date) -> cal.apply {
				add(
					Calendar.MINUTE,
					-number,
				)
			}.timeInMillis

			WordSet("second").anyWordIn(date) -> cal.apply {
				add(
					Calendar.SECOND,
					-number,
				)
			}.timeInMillis

			WordSet("month", "months").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
			WordSet("year").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
			else -> 0
		}
	}

}
|
||||
@ -0,0 +1,17 @@
|
||||
package org.koitharu.kotatsu.parsers.site.zmanga.id
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.zmanga.ZMangaParser
|
||||
import java.util.Locale
|
||||
|
||||
|
||||
/**
 * "Hensekai" (hensekai.com) — Indonesian hentai source built on the shared
 * ZManga template; only the locale is overridden.
 */
@MangaSourceParser("HENSEKAI", "Hensekai", "id", ContentType.HENTAI)
internal class Hensekai(context: MangaLoaderContext) :
	ZMangaParser(context, MangaSource.HENSEKAI, "hensekai.com") {

	// ENGLISH locale despite the "id" language — presumably chapter dates
	// are printed in English on this site; TODO confirm
	override val sourceLocale: Locale = Locale.ENGLISH
}
|
||||
@ -0,0 +1,17 @@
|
||||
package org.koitharu.kotatsu.parsers.site.zmanga.id
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.zmanga.ZMangaParser
|
||||
|
||||
|
||||
/**
 * "KomikIndo Info" (komikindo.info) — Indonesian hentai source built on the
 * shared ZManga template; only the chapter date pattern is overridden.
 */
@MangaSourceParser("KOMIKINDO_INFO", "KomikIndo Info", "id", ContentType.HENTAI)
internal class KomikIndoInfo(context: MangaLoaderContext) :
	ZMangaParser(context, MangaSource.KOMIKINDO_INFO, "komikindo.info") {

	// Chapter dates use day-first numeric format, e.g. "05 Dec 2019"
	override val datePattern = "dd MMM yyyy"

}
|
||||
@ -0,0 +1,44 @@
|
||||
package org.koitharu.kotatsu.parsers.site.zmanga.id
|
||||
|
||||
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.Manga
|
||||
import org.koitharu.kotatsu.parsers.model.MangaChapter
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.zmanga.ZMangaParser
|
||||
import org.koitharu.kotatsu.parsers.util.attrAsRelativeUrl
|
||||
import org.koitharu.kotatsu.parsers.util.generateUid
|
||||
import org.koitharu.kotatsu.parsers.util.mapChapters
|
||||
import org.koitharu.kotatsu.parsers.util.selectFirstOrThrow
|
||||
import java.text.SimpleDateFormat
|
||||
|
||||
// Info: Some scans are password-protected
@MangaSourceParser("MAID_ID", "Maid Id", "id")
internal class MaidId(context: MangaLoaderContext) :
	ZMangaParser(context, MangaSource.MAID_ID, "www.maid.my.id") {

	/**
	 * Chapter rows on this site embed extra markup in the title element, so
	 * instead of taking the raw text, the chapter number is extracted from
	 * the row's HTML and a normalized "Chapter N" name is rebuilt.
	 */
	override suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		val chapterDateFormat = SimpleDateFormat(datePattern, sourceLocale)
		val rows = doc.body().select(selectChapter)
		return rows.mapChapters(reversed = true) { index, row ->
			val link = row.selectFirstOrThrow("a")
			val chapterUrl = link.attrAsRelativeUrl("href")
			val rawTitle = row.selectFirstOrThrow(".flexch-infoz span").html()
			val chapterNumber = rawTitle.substringAfterLast("Chapter ").substringBefore("<span")
			val postedAt = row.selectFirst(selectDate)?.text()
			MangaChapter(
				id = generateUid(chapterUrl),
				name = "Chapter $chapterNumber",
				number = index + 1,
				url = chapterUrl,
				uploadDate = parseChapterDate(chapterDateFormat, postedAt),
				source = source,
				scanlator = null,
				branch = null,
			)
		}
	}
}
|
||||
@ -0,0 +1,12 @@
|
||||
package org.koitharu.kotatsu.parsers.site.zmanga.id
|
||||
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.zmanga.ZMangaParser
|
||||
|
||||
|
||||
/**
 * "Neu Manga" (neumanga.net) — Indonesian source using the shared ZManga
 * template with all defaults; no overrides are needed.
 */
@MangaSourceParser("NEU_MANGA", "Neu Manga", "id")
internal class NeuManga(context: MangaLoaderContext) :
	ZMangaParser(context, MangaSource.NEU_MANGA, "neumanga.net")
|
||||
@ -0,0 +1,44 @@
|
||||
package org.koitharu.kotatsu.parsers.site.zmanga.id
|
||||
|
||||
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaSourceParser
|
||||
import org.koitharu.kotatsu.parsers.model.ContentType
|
||||
import org.koitharu.kotatsu.parsers.model.Manga
|
||||
import org.koitharu.kotatsu.parsers.model.MangaChapter
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.zmanga.ZMangaParser
|
||||
import org.koitharu.kotatsu.parsers.util.attrAsRelativeUrl
|
||||
import org.koitharu.kotatsu.parsers.util.generateUid
|
||||
import org.koitharu.kotatsu.parsers.util.mapChapters
|
||||
import org.koitharu.kotatsu.parsers.util.selectFirstOrThrow
|
||||
import java.text.SimpleDateFormat
|
||||
|
||||
/**
 * "Shiro Doujin" (shirodoujin.com) — Indonesian hentai source on the shared
 * ZManga template.
 */
@MangaSourceParser("SHIRO_DOUJIN", "Shiro Doujin", "id", ContentType.HENTAI)
internal class ShiroDoujin(context: MangaLoaderContext) :
	ZMangaParser(context, MangaSource.SHIRO_DOUJIN, "shirodoujin.com") {

	/**
	 * Builds the chapter list, rebuilding each chapter name as "Chapter N"
	 * from the number embedded in the chapter row's HTML.
	 */
	override suspend fun getChapters(manga: Manga, doc: Document): List<MangaChapter> {
		val fmt = SimpleDateFormat(datePattern, sourceLocale)
		return doc.body()
			.select(selectChapter)
			.mapChapters(reversed = true) { pos, row ->
				val relUrl = row.selectFirstOrThrow("a").attrAsRelativeUrl("href")
				val number = row.selectFirstOrThrow(".flexch-infoz span")
					.html()
					.substringAfterLast("Chapter ")
					.substringBefore("<span")
				MangaChapter(
					id = generateUid(relUrl),
					name = "Chapter $number",
					number = pos + 1,
					url = relUrl,
					uploadDate = parseChapterDate(fmt, row.selectFirst(selectDate)?.text()),
					source = source,
					scanlator = null,
					branch = null,
				)
			}
	}
}
|
||||
Loading…
Reference in New Issue