diff --git a/build.gradle b/build.gradle index 3a47f839..ab3cfdb7 100644 --- a/build.gradle +++ b/build.gradle @@ -59,6 +59,7 @@ dependencies { api 'org.jsoup:jsoup:1.16.1' implementation 'org.json:json:20230618' implementation 'androidx.collection:collection-ktx:1.2.0' + implementation "com.daveanthonythomas.moshipack:moshipack:1.0.1" ksp project(':kotatsu-parsers-ksp') diff --git a/src/main/kotlin/org/koitharu/kotatsu/parsers/site/en/Anchira.kt b/src/main/kotlin/org/koitharu/kotatsu/parsers/site/en/Anchira.kt new file mode 100644 index 00000000..3b841134 --- /dev/null +++ b/src/main/kotlin/org/koitharu/kotatsu/parsers/site/en/Anchira.kt @@ -0,0 +1,194 @@ +package org.koitharu.kotatsu.parsers.site.en + +import com.daveanthonythomas.moshipack.MoshiPack +import okhttp3.Headers +import okhttp3.HttpUrl.Companion.toHttpUrl +import okhttp3.Response +import org.json.JSONArray +import org.json.JSONObject +import org.koitharu.kotatsu.parsers.MangaLoaderContext +import org.koitharu.kotatsu.parsers.MangaSourceParser +import org.koitharu.kotatsu.parsers.PagedMangaParser +import org.koitharu.kotatsu.parsers.config.ConfigKey +import org.koitharu.kotatsu.parsers.model.ContentType +import org.koitharu.kotatsu.parsers.model.Manga +import org.koitharu.kotatsu.parsers.model.MangaChapter +import org.koitharu.kotatsu.parsers.model.MangaPage +import org.koitharu.kotatsu.parsers.model.MangaSource +import org.koitharu.kotatsu.parsers.model.MangaState +import org.koitharu.kotatsu.parsers.model.MangaTag +import org.koitharu.kotatsu.parsers.model.RATING_UNKNOWN +import org.koitharu.kotatsu.parsers.model.SortOrder +import org.koitharu.kotatsu.parsers.util.domain +import org.koitharu.kotatsu.parsers.util.generateUid +import org.koitharu.kotatsu.parsers.util.json.getIntOrDefault +import org.koitharu.kotatsu.parsers.util.json.mapJSON +import org.koitharu.kotatsu.parsers.util.json.mapJSONToSet +import org.koitharu.kotatsu.parsers.util.mapToSet +import 
org.koitharu.kotatsu.parsers.util.toAbsoluteUrl +import java.lang.IllegalArgumentException +import java.util.EnumSet + +@MangaSourceParser("ANCHIRA", "Anchira", "en", ContentType.HENTAI) +internal class Anchira(context: MangaLoaderContext) : PagedMangaParser(context, MangaSource.ANCHIRA, 24) { + + private fun Response.decodeAsJson(): String { + val data = use { it.body?.bytes() } ?: throw IllegalArgumentException("Response body is null") + + return MoshiPack().msgpackToJson(data) + } + + override val configKeyDomain: ConfigKey.Domain + get() = ConfigKey.Domain("anchira.to") + + override val sortOrders: Set<SortOrder> + get() = EnumSet.of(SortOrder.UPDATED, SortOrder.POPULARITY, SortOrder.NEWEST, SortOrder.ALPHABETICAL) + + private val apiHeaders = Headers.Builder() + .set("X-Requested-With", "XMLHttpRequest") + .build() + + override suspend fun getTags(): Set<MangaTag> { + val url = "https://$domain/api/v1/tags" + val rawJson = webClient.httpGet(url, apiHeaders).decodeAsJson() + + return JSONArray(rawJson).mapJSONToSet { + MangaTag( + title = it.getString("name"), + key = it.getString("name"), + source = source + ) + } + } + + override suspend fun getListPage( + page: Int, + query: String?, + tags: Set<MangaTag>?, + sortOrder: SortOrder + ): List<Manga> { + val url = "https://$domain/api/v1/library".toHttpUrl().newBuilder().apply { + var advQuery = "" + tags?.onEach { + advQuery += "tag:\"^${it.key}$\" " + } + if (advQuery.isNotEmpty()) { + addQueryParameter("s", query?.let { "$it $advQuery" } ?: advQuery) + } else if (!query.isNullOrEmpty()) { + addQueryParameter("s", query.trim()) + } + when (sortOrder) { + SortOrder.POPULARITY -> addQueryParameter("sort", "32") + SortOrder.NEWEST -> addQueryParameter("sort", "4") + SortOrder.ALPHABETICAL -> addQueryParameter("sort", "1") + else -> {} + } + addQueryParameter("page", "$page") + }.build() + + val rawJson = webClient.httpGet(url, apiHeaders).decodeAsJson() + + val entries = runCatching { JSONObject(rawJson).getJSONArray("entries") } + .getOrElse { 
return emptyList() } + + return entries.mapJSON { entry -> + val id = entry.getInt("id") + val key = entry.getString("key") + val entryTags = entry.getJSONArray("tags").mapJSON { + Tag( + it.getString("name"), + it.getIntOrDefault("namespace", -1) + ) + } + val coverFile = entry.getJSONObject("cover").getString("n") + + Manga( + id = generateUid("$id/$key"), + title = entry.getString("title"), + altTitle = null, + url = "$id/$key", + publicUrl = "/g/$id/$key".toAbsoluteUrl(domain), + rating = RATING_UNKNOWN, + isNsfw = true, + coverUrl = "$cdnUrl/$id/$key/m/$coverFile", + largeCoverUrl = "$cdnUrl/$id/$key/b/$coverFile", + tags = entryTags.mapToSet { + when (it.namespace) { + 1 -> MangaTag("Artist: ${it.name}", it.name, source) + 2 -> MangaTag("Circle: ${it.name}", it.name, source) + 3 -> MangaTag("Parody: ${it.name}", it.name, source) + 4 -> MangaTag("Magazine: ${it.name}", it.name, source) + else -> MangaTag(it.name, it.name, source) + } + }, + state = MangaState.FINISHED, + author = entryTags.filter { it.namespace == 1 }.joinToString { it.name }, + source = source, + ) + } + } + + data class Tag( + val name: String, + val namespace: Int, + ) + + override suspend fun getDetails(manga: Manga): Manga { + val url = "https://$domain/api/v1/library/${manga.url}" + val rawJson = webClient.httpGet(url, apiHeaders).decodeAsJson() + val jsonData = JSONObject(rawJson) + + return manga.copy( + altTitle = jsonData.getString("filename"), + chapters = listOf( + MangaChapter( + id = generateUid(manga.id), + name = manga.title, + number = 1, + url = manga.url, + scanlator = null, + uploadDate = jsonData.getLong("published_at") * 1000, + branch = null, + source = source, + ) + ) + ) + } + + override suspend fun getRelatedManga(seed: Manga): List<Manga> { + val artistQuery = seed.author?.split(",") + ?.map(String::trim) + ?.filterNot(String::isEmpty) + ?.joinToString(" ") { + "artist:\"^$it$\"" + }.orEmpty().also { + if (it.isEmpty()) return emptyList() + } + + return getList(1, 
artistQuery) + } + + override suspend fun getPages(chapter: MangaChapter): List<MangaPage> { + val url = "https://$domain/api/v1/library/${chapter.url}" + val rawJson = webClient.httpGet(url, apiHeaders).decodeAsJson() + val jsonData = JSONObject(rawJson) + + val id = jsonData.getInt("id") + val key = jsonData.getString("key") + val hash = jsonData.getString("hash") + + return jsonData.getJSONArray("data").mapJSON { pageData -> + val fileName = pageData.getString("n") + MangaPage( + id = generateUid(fileName), + url = "$cdnUrl/$id/$key/$hash/b/$fileName", + preview = "$cdnUrl/$id/$key/s/$fileName", + source = source + ) + } + } + + companion object { + private const val cdnUrl = "https://kisakisexo.xyz" + } +} diff --git a/src/main/kotlin/org/koitharu/kotatsu/parsers/site/en/KskMoe.kt b/src/main/kotlin/org/koitharu/kotatsu/parsers/site/en/KskMoe.kt deleted file mode 100644 index e886c0ea..00000000 --- a/src/main/kotlin/org/koitharu/kotatsu/parsers/site/en/KskMoe.kt +++ /dev/null @@ -1,186 +0,0 @@ -package org.koitharu.kotatsu.parsers.site.en - -import androidx.collection.ArraySet - -import kotlinx.coroutines.async - -import kotlinx.coroutines.awaitAll - -import kotlinx.coroutines.coroutineScope - -import okhttp3.HttpUrl.Companion.toHttpUrlOrNull - -import org.json.JSONArray - -import org.jsoup.nodes.Element - -import org.koitharu.kotatsu.parsers.MangaLoaderContext - -import org.koitharu.kotatsu.parsers.MangaSourceParser - -import org.koitharu.kotatsu.parsers.PagedMangaParser - -import org.koitharu.kotatsu.parsers.config.ConfigKey - -import org.koitharu.kotatsu.parsers.model.* - -import org.koitharu.kotatsu.parsers.util.* - -import org.koitharu.kotatsu.parsers.util.json.mapJSON - -import java.text.SimpleDateFormat - -import java.util.* - -@MangaSourceParser("KSKMOE", "Ksk.moe", "en", ContentType.HENTAI) -internal class KskMoe(context: MangaLoaderContext) : PagedMangaParser(context, MangaSource.KSKMOE, 35) { - - override val sortOrders: Set<SortOrder> = - EnumSet.of(SortOrder.UPDATED, SortOrder.POPULARITY, 
SortOrder.NEWEST, SortOrder.ALPHABETICAL) - override val configKeyDomain = ConfigKey.Domain("ksk.moe") - - override suspend fun getListPage( - page: Int, - query: String?, - tags: Set<MangaTag>?, - sortOrder: SortOrder, - ): List<Manga> { - val tag = tags.oneOrThrowIfMany() - - val url = buildString { - append("https://") - append(domain) - - if (!tags.isNullOrEmpty()) { - append("/tags/") - append(tag?.key.orEmpty()) - } else { - append("/browse") - } - - if (page > 1) { - append("/page/") - append(page) - } - - when (sortOrder) { - SortOrder.POPULARITY -> append("?sort=32") - SortOrder.UPDATED -> append("") - SortOrder.NEWEST -> append("?sort=16") - SortOrder.ALPHABETICAL -> append("?sort=1") - else -> append("") - } - - if (!query.isNullOrEmpty()) { - append("?s=") - append(query.urlEncoded()) - } - } - val doc = webClient.httpGet(url).parseHtml() - - if (!doc.html().contains("pagination") && page > 1) { - return emptyList() - } - return doc.requireElementById("galleries").select("article").map { div -> - val href = div.selectFirstOrThrow("a").attrAsRelativeUrl("href") - Manga( - id = generateUid(href), - title = div.selectLastOrThrow("h3 span").text(), - altTitle = null, - url = href, - publicUrl = href.toAbsoluteUrl(domain), - rating = RATING_UNKNOWN, - isNsfw = true, - coverUrl = div.selectFirstOrThrow("img").src()?.toAbsoluteUrl(domain).orEmpty(), - tags = div.select("footer span").mapNotNullToSet { span -> - MangaTag( - key = span.text().urlEncoded(), - title = span.text(), - source = source, - ) - }, - state = null, - author = null, - source = source, - ) - } - } - - override suspend fun getTags(): Set<MangaTag> { - return coroutineScope { - (1..2).map { page -> - async { getTags(page) } - } - }.awaitAll().flattenTo(ArraySet(360)) - } - - private suspend fun getTags(page: Int): Set<MangaTag> { - val url = if (page == 1) { - "https://$domain/tags" - } else { - "https://$domain/tags/page/$page" - } - val root = webClient.httpGet(url).parseHtml().body().getElementById("tags") - return 
root?.parseTags().orEmpty() - } - - private fun Element.parseTags() = select("section.tags div a").mapToSet { a -> - MangaTag( - key = a.attr("href").substringAfterLast("/tags/"), - title = a.selectFirstOrThrow("span").text(), - source = source, - ) - } - - private val date = SimpleDateFormat("dd.MM.yyyy hh:mm 'UTC'", Locale.US) - override suspend fun getDetails(manga: Manga): Manga { - val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml() - - return manga.copy( - tags = doc.requireElementById("metadata").select("main div:contains(Tag) a").mapNotNullToSet { a -> - MangaTag( - key = a.attr("href").substringAfterLast("/tags/"), - title = a.selectFirstOrThrow("span").text(), - source = source, - ) - }, - author = doc.requireElementById("metadata").selectFirstOrThrow("main div:contains(Artist) a span").text(), - chapters = listOf( - MangaChapter( - id = generateUid(manga.id), - name = manga.title, - number = 1, - url = manga.url, - scanlator = null, - uploadDate = date.tryParse(doc.selectFirstOrThrow("time.updated").text()), - branch = null, - source = source, - ), - ), - ) - } - - override suspend fun getPages(chapter: MangaChapter): List<MangaPage> { - val fullUrl = chapter.url - .replace("/view/", "/read/") - .let { "$it/1" } - .toAbsoluteUrl(domain) - val document = webClient.httpGet(fullUrl).parseHtml() - - val id = fullUrl - .substringAfter("/read/") - .substringBeforeLast("/") - - val cdnUrl = document.selectFirst("meta[itemprop=image]") - ?.attr("content") - ?.toHttpUrlOrNull() - ?.host - .let { "https://" + (it ?: domain) } - - val script = document.select("script:containsData(window.metadata)").html() - - val rawJson = script - .substringAfter("original:") - .substringBefore("resampled:") - .substringBeforeLast(",") - - return JSONArray(rawJson).mapJSON { - val fileName = it.getString("n") - - val url = "$cdnUrl/original/$id/$fileName" - val preview = "$cdnUrl/t/$id/320/$fileName" - - MangaPage( - id = generateUid(url), - url = url, - preview = 
preview, - source = source, - ) - } - } -}