Initial commit
parent
c2a711f14d
commit
3521e0b534
@ -0,0 +1,14 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
charset = utf-8
|
||||
end_of_line = lf
|
||||
indent_style = tab
|
||||
max_line_length = 120
|
||||
tab_width = 4
|
||||
insert_final_newline = false
|
||||
disabled_rules=no-wildcard-imports,no-unused-imports
|
||||
|
||||
[{*.kt,*.kts}]
|
||||
ij_kotlin_allow_trailing_comma = true
|
||||
ij_kotlin_allow_trailing_comma_on_call_site = true
|
||||
@ -0,0 +1,76 @@
|
||||
# User-specific stuff
|
||||
.idea/**/workspace.xml
|
||||
.idea/**/tasks.xml
|
||||
.idea/**/usage.statistics.xml
|
||||
.idea/**/dictionaries
|
||||
.idea/**/shelf
|
||||
|
||||
# Generated files
|
||||
.idea/**/contentModel.xml
|
||||
|
||||
# Sensitive or high-churn files
|
||||
.idea/**/dataSources/
|
||||
.idea/**/dataSources.ids
|
||||
.idea/**/dataSources.local.xml
|
||||
.idea/**/sqlDataSources.xml
|
||||
.idea/**/dynamic.xml
|
||||
.idea/**/uiDesigner.xml
|
||||
.idea/**/dbnavigator.xml
|
||||
|
||||
# Gradle
|
||||
.idea/**/gradle.xml
|
||||
.idea/**/libraries
|
||||
|
||||
# Gradle and Maven with auto-import
|
||||
# When using Gradle or Maven with auto-import, you should exclude module files,
|
||||
# since they will be recreated, and may cause churn. Uncomment if using
|
||||
# auto-import.
|
||||
.idea/artifacts
|
||||
.idea/compiler.xml
|
||||
.idea/jarRepositories.xml
|
||||
.idea/modules.xml
|
||||
.idea/*.iml
|
||||
.idea/modules
|
||||
*.iml
|
||||
*.ipr
|
||||
|
||||
# CMake
|
||||
cmake-build-*/
|
||||
|
||||
# Mongo Explorer plugin
|
||||
.idea/**/mongoSettings.xml
|
||||
|
||||
# File-based project format
|
||||
*.iws
|
||||
|
||||
# IntelliJ
|
||||
out/
|
||||
|
||||
# mpeltonen/sbt-idea plugin
|
||||
.idea_modules/
|
||||
|
||||
# JIRA plugin
|
||||
atlassian-ide-plugin.xml
|
||||
|
||||
# Cursive Clojure plugin
|
||||
.idea/replstate.xml
|
||||
|
||||
# Crashlytics plugin (for Android Studio and IntelliJ)
|
||||
com_crashlytics_export_strings.xml
|
||||
crashlytics.properties
|
||||
crashlytics-build.properties
|
||||
fabric.properties
|
||||
|
||||
# Editor-based Rest Client
|
||||
.idea/httpRequests
|
||||
|
||||
# Android studio 3.1+ serialized cache file
|
||||
.idea/caches/build_file_checksums.ser
|
||||
|
||||
.gradle/
|
||||
build/
|
||||
|
||||
.idea/**/misc.xml
|
||||
.idea/**/vcs.xml
|
||||
.idea/**/ktlint.xml
|
||||
.idea/codeStyles/
|
||||
@ -0,0 +1,3 @@
|
||||
# Default ignored files
|
||||
/shelf/
|
||||
/workspace.xml
|
||||
@ -0,0 +1,45 @@
|
||||
plugins {
|
||||
id 'org.jetbrains.kotlin.jvm' version '1.6.20-M1'
|
||||
}
|
||||
|
||||
group = 'org.koitharu'
|
||||
version = '1.0'
|
||||
|
||||
repositories {
|
||||
mavenCentral()
|
||||
google()
|
||||
}
|
||||
|
||||
test {
|
||||
useJUnitPlatform()
|
||||
}
|
||||
|
||||
compileKotlin {
|
||||
kotlinOptions {
|
||||
jvmTarget = '1.8'
|
||||
freeCompilerArgs += [
|
||||
'-Xopt-in=kotlin.contracts.ExperimentalContracts',
|
||||
'-Xopt-in=kotlinx.coroutines.ExperimentalCoroutinesApi',
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
compileTestKotlin {
|
||||
kotlinOptions.jvmTarget = '1.8'
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.0'
|
||||
implementation 'com.squareup.okhttp3:okhttp:4.9.3'
|
||||
implementation 'com.squareup.okio:okio:3.0.0'
|
||||
implementation 'org.jsoup:jsoup:1.14.3'
|
||||
implementation 'org.json:json:20211205'
|
||||
implementation 'androidx.collection:collection-ktx:1.2.0'
|
||||
|
||||
// testImplementation 'org.jetbrains.kotlin:kotlin-test'
|
||||
testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.2'
|
||||
testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.8.2'
|
||||
testImplementation 'org.junit.jupiter:junit-jupiter-params:5.8.2'
|
||||
testImplementation 'org.jetbrains.kotlinx:kotlinx-coroutines-test:1.6.0'
|
||||
testImplementation 'io.webfolder:quickjs:1.1.0'
|
||||
}
|
||||
@ -0,0 +1 @@
|
||||
kotlin.code.style=official
|
||||
Binary file not shown.
@ -0,0 +1,5 @@
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-bin.zip
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
@ -0,0 +1,234 @@
|
||||
#!/bin/sh
|
||||
|
||||
#
|
||||
# Copyright © 2015-2021 the original authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
##############################################################################
|
||||
#
|
||||
# Gradle start up script for POSIX generated by Gradle.
|
||||
#
|
||||
# Important for running:
|
||||
#
|
||||
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
|
||||
# noncompliant, but you have some other compliant shell such as ksh or
|
||||
# bash, then to run this script, type that shell name before the whole
|
||||
# command line, like:
|
||||
#
|
||||
# ksh Gradle
|
||||
#
|
||||
# Busybox and similar reduced shells will NOT work, because this script
|
||||
# requires all of these POSIX shell features:
|
||||
# * functions;
|
||||
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
|
||||
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
|
||||
# * compound commands having a testable exit status, especially «case»;
|
||||
# * various built-in commands including «command», «set», and «ulimit».
|
||||
#
|
||||
# Important for patching:
|
||||
#
|
||||
# (2) This script targets any POSIX shell, so it avoids extensions provided
|
||||
# by Bash, Ksh, etc; in particular arrays are avoided.
|
||||
#
|
||||
# The "traditional" practice of packing multiple parameters into a
|
||||
# space-separated string is a well documented source of bugs and security
|
||||
# problems, so this is (mostly) avoided, by progressively accumulating
|
||||
# options in "$@", and eventually passing that to Java.
|
||||
#
|
||||
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
|
||||
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
|
||||
# see the in-line comments for details.
|
||||
#
|
||||
# There are tweaks for specific operating systems such as AIX, CygWin,
|
||||
# Darwin, MinGW, and NonStop.
|
||||
#
|
||||
# (3) This script is generated from the Groovy template
|
||||
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
|
||||
# within the Gradle project.
|
||||
#
|
||||
# You can find Gradle at https://github.com/gradle/gradle/.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
# Attempt to set APP_HOME
|
||||
|
||||
# Resolve links: $0 may be a link
|
||||
app_path=$0
|
||||
|
||||
# Need this for daisy-chained symlinks.
|
||||
while
|
||||
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
|
||||
[ -h "$app_path" ]
|
||||
do
|
||||
ls=$( ls -ld "$app_path" )
|
||||
link=${ls#*' -> '}
|
||||
case $link in #(
|
||||
/*) app_path=$link ;; #(
|
||||
*) app_path=$APP_HOME$link ;;
|
||||
esac
|
||||
done
|
||||
|
||||
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
|
||||
|
||||
APP_NAME="Gradle"
|
||||
APP_BASE_NAME=${0##*/}
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
|
||||
|
||||
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||
MAX_FD=maximum
|
||||
|
||||
warn () {
|
||||
echo "$*"
|
||||
} >&2
|
||||
|
||||
die () {
|
||||
echo
|
||||
echo "$*"
|
||||
echo
|
||||
exit 1
|
||||
} >&2
|
||||
|
||||
# OS specific support (must be 'true' or 'false').
|
||||
cygwin=false
|
||||
msys=false
|
||||
darwin=false
|
||||
nonstop=false
|
||||
case "$( uname )" in #(
|
||||
CYGWIN* ) cygwin=true ;; #(
|
||||
Darwin* ) darwin=true ;; #(
|
||||
MSYS* | MINGW* ) msys=true ;; #(
|
||||
NONSTOP* ) nonstop=true ;;
|
||||
esac
|
||||
|
||||
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
|
||||
|
||||
|
||||
# Determine the Java command to use to start the JVM.
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD=$JAVA_HOME/jre/sh/java
|
||||
else
|
||||
JAVACMD=$JAVA_HOME/bin/java
|
||||
fi
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
else
|
||||
JAVACMD=java
|
||||
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
|
||||
# Increase the maximum file descriptors if we can.
|
||||
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
|
||||
case $MAX_FD in #(
|
||||
max*)
|
||||
MAX_FD=$( ulimit -H -n ) ||
|
||||
warn "Could not query maximum file descriptor limit"
|
||||
esac
|
||||
case $MAX_FD in #(
|
||||
'' | soft) :;; #(
|
||||
*)
|
||||
ulimit -n "$MAX_FD" ||
|
||||
warn "Could not set maximum file descriptor limit to $MAX_FD"
|
||||
esac
|
||||
fi
|
||||
|
||||
# Collect all arguments for the java command, stacking in reverse order:
|
||||
# * args from the command line
|
||||
# * the main class name
|
||||
# * -classpath
|
||||
# * -D...appname settings
|
||||
# * --module-path (only if needed)
|
||||
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
|
||||
|
||||
# For Cygwin or MSYS, switch paths to Windows format before running java
|
||||
if "$cygwin" || "$msys" ; then
|
||||
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
|
||||
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
|
||||
|
||||
JAVACMD=$( cygpath --unix "$JAVACMD" )
|
||||
|
||||
# Now convert the arguments - kludge to limit ourselves to /bin/sh
|
||||
for arg do
|
||||
if
|
||||
case $arg in #(
|
||||
-*) false ;; # don't mess with options #(
|
||||
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
|
||||
[ -e "$t" ] ;; #(
|
||||
*) false ;;
|
||||
esac
|
||||
then
|
||||
arg=$( cygpath --path --ignore --mixed "$arg" )
|
||||
fi
|
||||
# Roll the args list around exactly as many times as the number of
|
||||
# args, so each arg winds up back in the position where it started, but
|
||||
# possibly modified.
|
||||
#
|
||||
# NB: a `for` loop captures its iteration list before it begins, so
|
||||
# changing the positional parameters here affects neither the number of
|
||||
# iterations, nor the values presented in `arg`.
|
||||
shift # remove old arg
|
||||
set -- "$@" "$arg" # push replacement arg
|
||||
done
|
||||
fi
|
||||
|
||||
# Collect all arguments for the java command;
|
||||
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
|
||||
# shell script including quotes and variable substitutions, so put them in
|
||||
# double quotes to make sure that they get re-expanded; and
|
||||
# * put everything else in single quotes, so that it's not re-expanded.
|
||||
|
||||
set -- \
|
||||
"-Dorg.gradle.appname=$APP_BASE_NAME" \
|
||||
-classpath "$CLASSPATH" \
|
||||
org.gradle.wrapper.GradleWrapperMain \
|
||||
"$@"
|
||||
|
||||
# Use "xargs" to parse quoted args.
|
||||
#
|
||||
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
|
||||
#
|
||||
# In Bash we could simply go:
|
||||
#
|
||||
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
|
||||
# set -- "${ARGS[@]}" "$@"
|
||||
#
|
||||
# but POSIX shell has neither arrays nor command substitution, so instead we
|
||||
# post-process each arg (as a line of input to sed) to backslash-escape any
|
||||
# character that might be a shell metacharacter, then use eval to reverse
|
||||
# that process (while maintaining the separation between arguments), and wrap
|
||||
# the whole thing up as a single "set" statement.
|
||||
#
|
||||
# This will of course break if any of these variables contains a newline or
|
||||
# an unmatched quote.
|
||||
#
|
||||
|
||||
eval "set -- $(
|
||||
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
|
||||
xargs -n1 |
|
||||
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
|
||||
tr '\n' ' '
|
||||
)" '"$@"'
|
||||
|
||||
exec "$JAVACMD" "$@"
|
||||
@ -0,0 +1,89 @@
|
||||
@rem
|
||||
@rem Copyright 2015 the original author or authors.
|
||||
@rem
|
||||
@rem Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@rem you may not use this file except in compliance with the License.
|
||||
@rem You may obtain a copy of the License at
|
||||
@rem
|
||||
@rem https://www.apache.org/licenses/LICENSE-2.0
|
||||
@rem
|
||||
@rem Unless required by applicable law or agreed to in writing, software
|
||||
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
@rem See the License for the specific language governing permissions and
|
||||
@rem limitations under the License.
|
||||
@rem
|
||||
|
||||
@if "%DEBUG%" == "" @echo off
|
||||
@rem ##########################################################################
|
||||
@rem
|
||||
@rem Gradle startup script for Windows
|
||||
@rem
|
||||
@rem ##########################################################################
|
||||
|
||||
@rem Set local scope for the variables with windows NT shell
|
||||
if "%OS%"=="Windows_NT" setlocal
|
||||
|
||||
set DIRNAME=%~dp0
|
||||
if "%DIRNAME%" == "" set DIRNAME=.
|
||||
set APP_BASE_NAME=%~n0
|
||||
set APP_HOME=%DIRNAME%
|
||||
|
||||
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
|
||||
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
|
||||
|
||||
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
|
||||
|
||||
@rem Find java.exe
|
||||
if defined JAVA_HOME goto findJavaFromJavaHome
|
||||
|
||||
set JAVA_EXE=java.exe
|
||||
%JAVA_EXE% -version >NUL 2>&1
|
||||
if "%ERRORLEVEL%" == "0" goto execute
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:findJavaFromJavaHome
|
||||
set JAVA_HOME=%JAVA_HOME:"=%
|
||||
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
|
||||
|
||||
if exist "%JAVA_EXE%" goto execute
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:execute
|
||||
@rem Setup the command line
|
||||
|
||||
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
|
||||
|
||||
|
||||
@rem Execute Gradle
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
|
||||
|
||||
:end
|
||||
@rem End local scope for the variables with windows NT shell
|
||||
if "%ERRORLEVEL%"=="0" goto mainEnd
|
||||
|
||||
:fail
|
||||
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
|
||||
rem the _cmd.exe /c_ return code!
|
||||
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
|
||||
exit /b 1
|
||||
|
||||
:mainEnd
|
||||
if "%OS%"=="Windows_NT" endlocal
|
||||
|
||||
:omega
|
||||
@ -0,0 +1,3 @@
|
||||
|
||||
rootProject.name = 'kotatsu-parsers'
|
||||
|
||||
@ -0,0 +1,90 @@
|
||||
package org.koitharu.kotatsu.parsers
|
||||
|
||||
import okhttp3.*
|
||||
import okhttp3.MediaType.Companion.toMediaType
|
||||
import okhttp3.RequestBody.Companion.toRequestBody
|
||||
import org.json.JSONObject
|
||||
import org.koitharu.kotatsu.parsers.exception.GraphQLException
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.util.await
|
||||
import org.koitharu.kotatsu.parsers.util.parseJson
|
||||
import java.util.*
|
||||
|
||||
abstract class MangaLoaderContext {
|
||||
|
||||
protected abstract val httpClient: OkHttpClient
|
||||
|
||||
abstract val cookieJar: CookieJar
|
||||
|
||||
suspend fun httpGet(url: String, headers: Headers? = null): Response {
|
||||
val request = Request.Builder()
|
||||
.get()
|
||||
.url(url)
|
||||
if (headers != null) {
|
||||
request.headers(headers)
|
||||
}
|
||||
return httpClient.newCall(request.build()).await()
|
||||
}
|
||||
|
||||
suspend fun httpPost(
|
||||
url: String,
|
||||
form: Map<String, String>,
|
||||
): Response {
|
||||
val body = FormBody.Builder()
|
||||
form.forEach { (k, v) ->
|
||||
body.addEncoded(k, v)
|
||||
}
|
||||
val request = Request.Builder()
|
||||
.post(body.build())
|
||||
.url(url)
|
||||
return httpClient.newCall(request.build()).await()
|
||||
}
|
||||
|
||||
suspend fun httpPost(
|
||||
url: String,
|
||||
payload: String,
|
||||
): Response {
|
||||
val body = FormBody.Builder()
|
||||
payload.split('&').forEach {
|
||||
val pos = it.indexOf('=')
|
||||
if (pos != -1) {
|
||||
val k = it.substring(0, pos)
|
||||
val v = it.substring(pos + 1)
|
||||
body.addEncoded(k, v)
|
||||
}
|
||||
}
|
||||
val request = Request.Builder()
|
||||
.post(body.build())
|
||||
.url(url)
|
||||
return httpClient.newCall(request.build()).await()
|
||||
}
|
||||
|
||||
suspend fun graphQLQuery(endpoint: String, query: String): JSONObject {
|
||||
val body = JSONObject()
|
||||
body.put("operationName", null as Any?)
|
||||
body.put("variables", JSONObject())
|
||||
body.put("query", "{$query}")
|
||||
val mediaType = "application/json; charset=utf-8".toMediaType()
|
||||
val requestBody = body.toString().toRequestBody(mediaType)
|
||||
val request = Request.Builder()
|
||||
.post(requestBody)
|
||||
.url(endpoint)
|
||||
val json = httpClient.newCall(request.build()).await().parseJson()
|
||||
json.optJSONArray("errors")?.let {
|
||||
if (it.length() != 0) {
|
||||
throw GraphQLException(it)
|
||||
}
|
||||
}
|
||||
return json
|
||||
}
|
||||
|
||||
open fun encodeBase64(data: ByteArray): String = Base64.getEncoder().encodeToString(data)
|
||||
|
||||
open fun decodeBase64(data: String): ByteArray = Base64.getDecoder().decode(data)
|
||||
|
||||
open fun getPreferredLocales(): List<Locale> = listOf(Locale.getDefault())
|
||||
|
||||
abstract suspend fun evaluateJs(script: String): String?
|
||||
|
||||
abstract fun getConfig(source: MangaSource): MangaSourceConfig
|
||||
}
|
||||
@ -0,0 +1,91 @@
|
||||
package org.koitharu.kotatsu.parsers
|
||||
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
|
||||
abstract class MangaParser {
|
||||
|
||||
protected abstract val context: MangaLoaderContext
|
||||
|
||||
abstract val source: MangaSource
|
||||
|
||||
abstract val sortOrders: Set<SortOrder>
|
||||
|
||||
abstract val defaultDomain: String
|
||||
|
||||
protected val config by lazy { context.getConfig(source) }
|
||||
|
||||
abstract suspend fun getList(
|
||||
offset: Int,
|
||||
query: String? = null,
|
||||
tags: Set<MangaTag>? = null,
|
||||
sortOrder: SortOrder? = null,
|
||||
): List<Manga>
|
||||
|
||||
abstract suspend fun getDetails(manga: Manga): Manga
|
||||
|
||||
abstract suspend fun getPages(chapter: MangaChapter): List<MangaPage>
|
||||
|
||||
open suspend fun getPageUrl(page: MangaPage): String = page.url.withDomain()
|
||||
|
||||
abstract suspend fun getTags(): Set<MangaTag>
|
||||
|
||||
open fun getFaviconUrl() = "https://${getDomain()}/favicon.ico"
|
||||
|
||||
/* Utils */
|
||||
|
||||
protected fun getDomain(): String {
|
||||
return config.getDomain(defaultDomain)
|
||||
}
|
||||
|
||||
protected fun generateUid(url: String): Long {
|
||||
var h = 1125899906842597L
|
||||
source.name.forEach { c ->
|
||||
h = 31 * h + c.code
|
||||
}
|
||||
url.forEach { c ->
|
||||
h = 31 * h + c.code
|
||||
}
|
||||
return h
|
||||
}
|
||||
|
||||
protected fun generateUid(id: Long): Long {
|
||||
var h = 1125899906842597L
|
||||
source.name.forEach { c ->
|
||||
h = 31 * h + c.code
|
||||
}
|
||||
h = 31 * h + id
|
||||
return h
|
||||
}
|
||||
|
||||
protected fun String.withDomain(subdomain: String? = null) = when {
|
||||
this.startsWith("//") -> buildString {
|
||||
append("http")
|
||||
if (config.isSslEnabled(true)) {
|
||||
append('s')
|
||||
}
|
||||
append(":")
|
||||
append(this@withDomain)
|
||||
}
|
||||
this.startsWith("/") -> buildString {
|
||||
append("http")
|
||||
if (config.isSslEnabled(true)) {
|
||||
append('s')
|
||||
}
|
||||
append("://")
|
||||
if (subdomain != null) {
|
||||
append(subdomain)
|
||||
append('.')
|
||||
append(config.getDomain(defaultDomain).removePrefix("www."))
|
||||
} else {
|
||||
append(config.getDomain(defaultDomain))
|
||||
}
|
||||
append(this@withDomain)
|
||||
}
|
||||
else -> this
|
||||
}
|
||||
|
||||
protected fun parseFailed(message: String? = null): Nothing {
|
||||
throw ParseException(message)
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,10 @@
|
||||
package org.koitharu.kotatsu.parsers
|
||||
|
||||
interface MangaParserAuthProvider {
|
||||
|
||||
val authUrl: String
|
||||
|
||||
val isAuthorized: Boolean
|
||||
|
||||
suspend fun getUsername(): String
|
||||
}
|
||||
@ -0,0 +1,38 @@
|
||||
package org.koitharu.kotatsu.parsers
|
||||
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.site.*
|
||||
|
||||
fun MangaSource.newParser(context: MangaLoaderContext): MangaParser = when (this) {
|
||||
MangaSource.READMANGA_RU -> ReadmangaParser(context)
|
||||
MangaSource.MINTMANGA -> MintMangaParser(context)
|
||||
MangaSource.SELFMANGA -> SelfMangaParser(context)
|
||||
MangaSource.MANGACHAN -> MangaChanParser(context)
|
||||
MangaSource.DESUME -> DesuMeParser(context)
|
||||
MangaSource.HENCHAN -> HenChanParser(context)
|
||||
MangaSource.YAOICHAN -> YaoiChanParser(context)
|
||||
MangaSource.MANGATOWN -> MangaTownParser(context)
|
||||
MangaSource.MANGALIB -> MangaLibParser(context)
|
||||
MangaSource.NUDEMOON -> NudeMoonParser(context)
|
||||
MangaSource.MANGAREAD -> MangareadParser(context)
|
||||
MangaSource.REMANGA -> RemangaParser(context)
|
||||
MangaSource.HENTAILIB -> HentaiLibParser(context)
|
||||
MangaSource.ANIBEL -> AnibelParser(context)
|
||||
MangaSource.NINEMANGA_EN -> NineMangaParser.English(context)
|
||||
MangaSource.NINEMANGA_ES -> NineMangaParser.Spanish(context)
|
||||
MangaSource.NINEMANGA_RU -> NineMangaParser.Russian(context)
|
||||
MangaSource.NINEMANGA_DE -> NineMangaParser.Deutsch(context)
|
||||
MangaSource.NINEMANGA_IT -> NineMangaParser.Italiano(context)
|
||||
MangaSource.NINEMANGA_BR -> NineMangaParser.Brazil(context)
|
||||
MangaSource.NINEMANGA_FR -> NineMangaParser.Francais(context)
|
||||
MangaSource.EXHENTAI -> ExHentaiParser(context)
|
||||
MangaSource.MANGAOWL -> MangaOwlParser(context)
|
||||
MangaSource.MANGADEX -> MangaDexParser(context)
|
||||
MangaSource.BATOTO -> BatoToParser(context)
|
||||
MangaSource.COMICK_FUN -> ComickFunParser(context)
|
||||
MangaSource.LOCAL -> throw NotImplementedError("Local manga parser is not supported")
|
||||
}.also {
|
||||
require(it.source == this) {
|
||||
"Cannot instantiate manga parser: $name mapped to ${it.source}"
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,6 @@
|
||||
package org.koitharu.kotatsu.parsers
|
||||
|
||||
interface MangaSourceConfig {
|
||||
fun getDomain(defaultValue: String): String
|
||||
fun isSslEnabled(defaultValue: Boolean): Boolean
|
||||
}
|
||||
@ -0,0 +1,7 @@
|
||||
package org.koitharu.kotatsu.parsers.exception
|
||||
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
|
||||
class AuthRequiredException(
|
||||
val source: MangaSource,
|
||||
) : RuntimeException("Authorization required")
|
||||
@ -0,0 +1,7 @@
|
||||
package org.koitharu.kotatsu.parsers.exception
|
||||
|
||||
import okio.IOException
|
||||
|
||||
class CloudFlareProtectedException(
|
||||
val url: String,
|
||||
) : IOException("Protected by CloudFlare")
|
||||
@ -0,0 +1,15 @@
|
||||
package org.koitharu.kotatsu.parsers.exception
|
||||
|
||||
import okio.IOException
|
||||
import org.json.JSONArray
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSON
|
||||
|
||||
class GraphQLException(private val errors: JSONArray) : IOException() {
|
||||
|
||||
val messages = errors.mapJSON {
|
||||
it.getString("message")
|
||||
}
|
||||
|
||||
override val message: String
|
||||
get() = messages.joinToString("\n")
|
||||
}
|
||||
@ -0,0 +1,6 @@
|
||||
package org.koitharu.kotatsu.parsers.exception
|
||||
|
||||
class ParseException(
|
||||
message: String? = null,
|
||||
cause: Throwable? = null,
|
||||
) : RuntimeException(message, cause)
|
||||
@ -0,0 +1,25 @@
|
||||
package org.koitharu.kotatsu.parsers.model
|
||||
|
||||
data class Manga(
|
||||
val id: Long,
|
||||
val title: String,
|
||||
val altTitle: String? = null,
|
||||
val url: String, // relative url for internal use
|
||||
val publicUrl: String,
|
||||
val rating: Float = NO_RATING, // normalized value [0..1] or -1
|
||||
val isNsfw: Boolean = false,
|
||||
val coverUrl: String,
|
||||
val largeCoverUrl: String? = null,
|
||||
val description: String? = null, // HTML
|
||||
val tags: Set<MangaTag> = emptySet(),
|
||||
val state: MangaState? = null,
|
||||
val author: String? = null,
|
||||
val chapters: List<MangaChapter>? = null,
|
||||
val source: MangaSource,
|
||||
) {
|
||||
|
||||
companion object {
|
||||
|
||||
const val NO_RATING = -1f
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,17 @@
|
||||
package org.koitharu.kotatsu.parsers.model
|
||||
|
||||
data class MangaChapter(
|
||||
val id: Long,
|
||||
val name: String,
|
||||
val number: Int,
|
||||
val url: String,
|
||||
val scanlator: String?,
|
||||
val uploadDate: Long,
|
||||
val branch: String?,
|
||||
val source: MangaSource,
|
||||
) : Comparable<MangaChapter> {
|
||||
|
||||
override fun compareTo(other: MangaChapter): Int {
|
||||
return number.compareTo(other.number)
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,9 @@
|
||||
package org.koitharu.kotatsu.parsers.model
|
||||
|
||||
data class MangaPage(
|
||||
val id: Long,
|
||||
val url: String,
|
||||
val referer: String,
|
||||
val preview: String?,
|
||||
val source: MangaSource,
|
||||
)
|
||||
@ -0,0 +1,36 @@
|
||||
package org.koitharu.kotatsu.parsers.model
|
||||
|
||||
@Suppress("SpellCheckingInspection")
|
||||
enum class MangaSource(
|
||||
val title: String,
|
||||
val locale: String?,
|
||||
) {
|
||||
LOCAL("Local", null),
|
||||
READMANGA_RU("ReadManga", "ru"),
|
||||
MINTMANGA("MintManga", "ru"),
|
||||
SELFMANGA("SelfManga", "ru"),
|
||||
MANGACHAN("Манга-тян", "ru"),
|
||||
DESUME("Desu.me", "ru"),
|
||||
HENCHAN("Хентай-тян", "ru"),
|
||||
YAOICHAN("Яой-тян", "ru"),
|
||||
MANGATOWN("MangaTown", "en"),
|
||||
MANGALIB("MangaLib", "ru"),
|
||||
NUDEMOON("Nude-Moon", "ru"),
|
||||
MANGAREAD("MangaRead", "en"),
|
||||
REMANGA("Remanga", "ru"),
|
||||
HENTAILIB("HentaiLib", "ru"),
|
||||
ANIBEL("Anibel", "be"),
|
||||
NINEMANGA_EN("NineManga English", "en"),
|
||||
NINEMANGA_ES("NineManga Español", "es"),
|
||||
NINEMANGA_RU("NineManga Русский", "ru"),
|
||||
NINEMANGA_DE("NineManga Deutsch", "de"),
|
||||
NINEMANGA_IT("NineManga Italiano", "it"),
|
||||
NINEMANGA_BR("NineManga Brasil", "pt"),
|
||||
NINEMANGA_FR("NineManga Français", "fr"),
|
||||
EXHENTAI("ExHentai", null),
|
||||
MANGAOWL("MangaOwl", "en"),
|
||||
MANGADEX("MangaDex", null),
|
||||
BATOTO("Bato.To", null),
|
||||
COMICK_FUN("ComicK", null),
|
||||
;
|
||||
}
|
||||
@ -0,0 +1,5 @@
|
||||
package org.koitharu.kotatsu.parsers.model
|
||||
|
||||
enum class MangaState {
|
||||
ONGOING, FINISHED
|
||||
}
|
||||
@ -0,0 +1,7 @@
|
||||
package org.koitharu.kotatsu.parsers.model
|
||||
|
||||
data class MangaTag(
|
||||
val title: String,
|
||||
val key: String,
|
||||
val source: MangaSource,
|
||||
)
|
||||
@ -0,0 +1,9 @@
|
||||
package org.koitharu.kotatsu.parsers.model
|
||||
|
||||
enum class SortOrder {
|
||||
UPDATED,
|
||||
POPULARITY,
|
||||
RATING,
|
||||
NEWEST,
|
||||
ALPHABETICAL
|
||||
}
|
||||
@ -0,0 +1,8 @@
|
||||
package org.koitharu.kotatsu.parsers.model
|
||||
|
||||
class WordSet(private vararg val words: String) {
|
||||
|
||||
fun anyWordIn(dateString: String): Boolean = words.any {
|
||||
dateString.contains(it, ignoreCase = true)
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,262 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import androidx.collection.ArraySet
|
||||
import org.json.JSONArray
|
||||
import org.json.JSONObject
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSON
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSONIndexed
|
||||
import org.koitharu.kotatsu.parsers.util.json.stringIterator
|
||||
import java.util.*
|
||||
|
||||
/**
 * Parser for anibel.net — a Belarusian manga catalogue backed by a GraphQL API.
 *
 * All network access goes through [apiCall], which sends a GraphQL query to
 * `https://api.<domain>/graphql` and unwraps the `data` object of the response.
 */
internal class AnibelParser(override val context: MangaLoaderContext) : MangaParser() {

	override val source = MangaSource.ANIBEL

	override val defaultDomain = "anibel.net"

	// The API offers no sorting options; listings come back newest-first only.
	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.NEWEST,
	)

	override fun getFaviconUrl(): String {
		return "https://cdn.${getDomain()}/favicons/favicon.png"
	}

	override suspend fun getList(
		offset: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder?,
	): List<Manga> {
		if (!query.isNullOrEmpty()) {
			// The search endpoint is not paginated, so only the first page has results.
			return if (offset == 0) {
				search(query)
			} else {
				emptyList()
			}
		}
		// FIX: the lambda previously returned the literal text "it.key"
		// ({ "\"it.key\"" }), so genre filtering silently never worked.
		// The tag key must be interpolated into the quoted GraphQL list item.
		val filters = tags?.takeUnless { it.isEmpty() }?.joinToString(
			separator = ",",
			prefix = "genres: [",
			postfix = "]",
		) { "\"${it.key}\"" }.orEmpty()
		val array = apiCall(
			"""
			getMediaList(offset: $offset, limit: 20, mediaType: manga, filters: {$filters}) {
				docs {
					mediaId
					title {
						be
						alt
					}
					rating
					poster
					genres
					slug
					mediaType
					status
				}
			}
			""".trimIndent(),
		).getJSONObject("getMediaList").getJSONArray("docs")
		return array.mapJSON { jo ->
			val mediaId = jo.getString("mediaId")
			val title = jo.getJSONObject("title")
			val href = "${jo.getString("mediaType")}/${jo.getString("slug")}"
			Manga(
				id = generateUid(mediaId),
				title = title.getString("be"),
				// Posters are served from the "cdn" subdomain; the query string
				// requests a thumbnail-sized rendition.
				coverUrl = jo.getString("poster").removePrefix("/cdn")
					.withDomain("cdn") + "?width=200&height=280",
				altTitle = title.getString("alt").takeUnless(String::isEmpty),
				author = null,
				// API rating is on a 0..10 scale; normalize to 0..1.
				rating = jo.getDouble("rating").toFloat() / 10f,
				url = href,
				publicUrl = "https://${getDomain()}/$href",
				tags = jo.getJSONArray("genres").mapToTags(),
				state = when (jo.getString("status")) {
					"ongoing" -> MangaState.ONGOING
					"finished" -> MangaState.FINISHED
					else -> null
				},
				source = source,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		// manga.url has the form "<mediaType>/<slug>" (see getList/search).
		val (type, slug) = manga.url.split('/')
		val details = apiCall(
			"""
			media(mediaType: $type, slug: "$slug") {
				mediaId
				title {
					be
					alt
				}
				description {
					be
				}
				status
				poster
				rating
				genres
			}
			""".trimIndent(),
		).getJSONObject("media")
		val title = details.getJSONObject("title")
		val poster = details.getString("poster").removePrefix("/cdn")
			.withDomain("cdn")
		val chapters = apiCall(
			"""
			chapters(mediaId: "${details.getString("mediaId")}") {
				id
				chapter
				released
			}
			""".trimIndent(),
		).getJSONArray("chapters")
		return manga.copy(
			title = title.getString("be"),
			altTitle = title.getString("alt"),
			coverUrl = "$poster?width=200&height=280",
			largeCoverUrl = poster,
			description = details.getJSONObject("description").getString("be"),
			rating = details.getDouble("rating").toFloat() / 10f,
			tags = details.getJSONArray("genres").mapToTags(),
			state = when (details.getString("status")) {
				"ongoing" -> MangaState.ONGOING
				"finished" -> MangaState.FINISHED
				else -> null
			},
			chapters = chapters.mapJSON { jo ->
				val number = jo.getInt("chapter")
				MangaChapter(
					id = generateUid(jo.getString("id")),
					name = "Глава $number",
					number = number,
					url = "${manga.url}/read/$number",
					scanlator = null,
					// "released" appears to be an epoch timestamp — TODO confirm units (ms vs s).
					uploadDate = jo.getLong("released"),
					branch = null,
					source = source,
				)
			},
		)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		// chapter.url has the form "<mediaType>/<slug>/read/<number>" (see getDetails).
		val (_, slug, _, number) = chapter.url.split('/')
		val chapterJson = apiCall(
			"""
			chapter(slug: "$slug", chapter: $number) {
				id
				images {
					large
					thumbnail
				}
			}
			""".trimIndent(),
		).getJSONObject("chapter")
		val pages = chapterJson.getJSONArray("images")
		val chapterUrl = "https://${getDomain()}/${chapter.url}"
		return pages.mapJSONIndexed { i, jo ->
			MangaPage(
				id = generateUid("${chapter.url}/$i"),
				url = jo.getString("large"),
				referer = chapterUrl,
				preview = jo.getString("thumbnail"),
				source = source,
			)
		}
	}

	override suspend fun getTags(): Set<MangaTag> {
		val json = apiCall(
			"""
			getFilters(mediaType: manga) {
				genres
			}
			""".trimIndent(),
		)
		val array = json.getJSONObject("getFilters").getJSONArray("genres")
		return array.mapToTags()
	}

	/**
	 * Full-text search. Not paginated by the API; at most 40 results are returned.
	 */
	private suspend fun search(query: String): List<Manga> {
		val json = apiCall(
			"""
			search(query: "$query", limit: 40) {
				id
				title {
					be
					en
				}
				poster
				url
				type
			}
			""".trimIndent(),
		)
		val array = json.getJSONArray("search")
		return array.mapJSON { jo ->
			val mediaId = jo.getString("id")
			val title = jo.getJSONObject("title")
			val href = "${jo.getString("type").lowercase()}/${jo.getString("url")}"
			Manga(
				id = generateUid(mediaId),
				title = title.getString("be"),
				coverUrl = jo.getString("poster").removePrefix("/cdn")
					.withDomain("cdn") + "?width=200&height=280",
				altTitle = title.getString("en").takeUnless(String::isEmpty),
				author = null,
				// Search results carry no rating.
				rating = Manga.NO_RATING,
				url = href,
				publicUrl = "https://${getDomain()}/$href",
				tags = emptySet(),
				state = null,
				source = source,
			)
		}
	}

	/**
	 * Executes a GraphQL query against the site API and returns the `data` object.
	 */
	private suspend fun apiCall(request: String): JSONObject {
		return context.graphQLQuery("https://api.${getDomain()}/graphql", request)
			.getJSONObject("data")
	}

	/**
	 * Converts a JSON array of genre slugs into a set of [MangaTag],
	 * deriving a display title from each slug.
	 */
	private fun JSONArray.mapToTags(): Set<MangaTag> {

		// Turns a slug like "slice-of-life" into "Slice of life": dashes become
		// spaces and only the first character is capitalized.
		// NOTE(review): `capitalize` is never reset after a dash, so only the
		// first word is title-cased — presumably intentional; confirm.
		fun toTitle(slug: String): String {
			val builder = StringBuilder(slug)
			var capitalize = true
			for ((i, c) in builder.withIndex()) {
				when {
					c == '-' -> {
						builder.setCharAt(i, ' ')
					}
					capitalize -> {
						builder.setCharAt(i, c.uppercaseChar())
						capitalize = false
					}
				}
			}
			return builder.toString()
		}

		val result = ArraySet<MangaTag>(length())
		stringIterator().forEach {
			result.add(
				MangaTag(
					title = toTitle(it),
					key = it,
					source = source,
				),
			)
		}
		return result
	}
}
|
||||
@ -0,0 +1,307 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import androidx.collection.ArraySet
|
||||
import org.json.JSONArray
|
||||
import org.json.JSONObject
|
||||
import org.jsoup.nodes.Element
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.nio.charset.StandardCharsets
|
||||
import java.security.MessageDigest
|
||||
import java.util.*
|
||||
import javax.crypto.Cipher
|
||||
import javax.crypto.spec.IvParameterSpec
|
||||
import javax.crypto.spec.SecretKeySpec
|
||||
|
||||
// Items per page in browse listings and in search results, respectively.
private const val PAGE_SIZE = 60
private const val PAGE_SIZE_SEARCH = 20

/**
 * Parser for bato.to.
 *
 * Listings and details are scraped from HTML; page image URLs are embedded in
 * an inline script where the image server address is AES-encrypted with a key
 * derived from a JS expression (`batojs`) evaluated at runtime.
 */
internal class BatoToParser(override val context: MangaLoaderContext) : MangaParser() {

	override val source = MangaSource.BATOTO

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.NEWEST,
		SortOrder.UPDATED,
		SortOrder.POPULARITY,
		SortOrder.ALPHABETICAL,
	)

	override val defaultDomain: String = "bato.to"

	override suspend fun getList(
		offset: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder?,
	): List<Manga> {
		if (!query.isNullOrEmpty()) {
			return search(offset, query)
		}
		val page = (offset / PAGE_SIZE) + 1

		// SortOrder.RATING is intentionally unsupported (not in sortOrders).
		@Suppress("NON_EXHAUSTIVE_WHEN_STATEMENT")
		val url = buildString {
			append("https://")
			append(getDomain())
			append("/browse?sort=")
			when (sortOrder) {
				null,
				SortOrder.UPDATED,
				-> append("update.za")
				SortOrder.POPULARITY -> append("views_a.za")
				SortOrder.NEWEST -> append("create.za")
				SortOrder.ALPHABETICAL -> append("title.az")
			}
			if (!tags.isNullOrEmpty()) {
				append("&genres=")
				appendAll(tags, ",") { it.key }
			}
			append("&page=")
			append(page)
		}
		return parseList(url, page)
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val root = context.httpGet(manga.url.withDomain()).parseHtml()
			.getElementById("mainer") ?: parseFailed("Cannot find root")
		val details = root.selectFirst(".detail-set") ?: parseFailed("Cannot find detail-set")
		// Attribute rows look like "<label>: <value>"; index them by label text.
		val attrs = details.selectFirst(".attr-main")?.select(".attr-item")?.associate {
			it.child(0).text().trim() to it.child(1)
		}.orEmpty()
		return manga.copy(
			title = root.selectFirst("h3.item-title")?.text() ?: manga.title,
			isNsfw = !root.selectFirst("alert")?.getElementsContainingOwnText("NSFW").isNullOrEmpty(),
			largeCoverUrl = details.selectFirst("img[src]")?.absUrl("src"),
			description = details.getElementById("limit-height-body-summary")
				?.selectFirst(".limit-html")
				?.html(),
			tags = manga.tags + attrs["Genres:"]?.parseTags().orEmpty(),
			state = when (attrs["Release status:"]?.text()) {
				"Ongoing" -> MangaState.ONGOING
				"Completed" -> MangaState.FINISHED
				else -> manga.state
			},
			author = attrs["Authors:"]?.text()?.trim() ?: manga.author,
			// Chapters are listed newest-first on the page; reverse for ascending numbering.
			chapters = root.selectFirst(".episode-list")
				?.selectFirst(".main")
				?.children()
				?.reversed()
				?.mapIndexedNotNull { i, div ->
					div.parseChapter(i)
				}.orEmpty(),
		)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.withDomain()
		val scripts = context.httpGet(fullUrl).parseHtml().select("script")
		for (script in scripts) {
			val scriptSrc = script.html()
			val p = scriptSrc.indexOf("const images =")
			if (p == -1) continue
			val start = scriptSrc.indexOf('[', p)
			val end = scriptSrc.indexOf(';', start)
			if (start == -1 || end == -1) {
				continue
			}
			val images = JSONArray(scriptSrc.substring(start, end))
			// "batojs" is a JS expression that evaluates to the AES password;
			// "server" is the AES-encrypted image server base URL.
			val batoJs = scriptSrc.substringBetweenFirst("batojs =", ";")?.trim(' ', '"', '\n')
				?: parseFailed("Cannot find batojs")
			val server = scriptSrc.substringBetweenFirst("server =", ";")?.trim(' ', '"', '\n')
				?: parseFailed("Cannot find server")
			val password = context.evaluateJs(batoJs)?.removeSurrounding('"')
				?: parseFailed("Cannot evaluate batojs")
			val serverDecrypted = decryptAES(server, password).removeSurrounding('"')
			val result = ArrayList<MangaPage>(images.length())
			repeat(images.length()) { i ->
				val url = images.getString(i)
				result += MangaPage(
					id = generateUid(url),
					// Relative entries are resolved against the decrypted server base.
					url = if (url.startsWith("http")) url else "$serverDecrypted$url",
					referer = fullUrl,
					preview = null,
					source = source,
				)
			}
			return result
		}
		parseFailed("Cannot find images list")
	}

	override suspend fun getTags(): Set<MangaTag> {
		val scripts = context.httpGet(
			"https://${getDomain()}/browse",
		).parseHtml().select("script")
		for (script in scripts) {
			val genres = script.html().substringBetweenFirst("const _genres =", ";") ?: continue
			val jo = JSONObject(genres)
			val result = ArraySet<MangaTag>(jo.length())
			jo.keys().forEach { key ->
				val item = jo.getJSONObject(key)
				result += MangaTag(
					title = item.getString("text").toTitleCase(),
					key = item.getString("file"),
					source = source,
				)
			}
			return result
		}
		// FIX: error message previously read "gernes" (typo).
		parseFailed("Cannot find genres list")
	}

	override fun getFaviconUrl(): String = "https://styles.amarkcdn.com/img/batoto/favicon.ico?v0"

	private suspend fun search(offset: Int, query: String): List<Manga> {
		val page = (offset / PAGE_SIZE_SEARCH) + 1
		val url = buildString {
			append("https://")
			append(getDomain())
			append("/search?word=")
			append(query.replace(' ', '+'))
			append("&page=")
			append(page)
		}
		return parseList(url, page)
	}

	// Reads the page number marked active in the pagination widget, so that
	// out-of-range requests (the site clamps them) can be detected.
	private fun getActivePage(body: Element): Int = body.select("nav ul.pagination > li.page-item.active")
		.lastOrNull()
		?.text()
		?.toIntOrNull() ?: parseFailed("Cannot determine current page")

	private suspend fun parseList(url: String, page: Int): List<Manga> {
		val body = context.httpGet(url).parseHtml().body()
		if (body.selectFirst(".browse-no-matches") != null) {
			return emptyList()
		}
		val activePage = getActivePage(body)
		if (activePage != page) {
			// The site redirected to a different page — requested page is past the end.
			return emptyList()
		}
		val root = body.getElementById("series-list") ?: parseFailed("Cannot find root")
		return root.children().map { div ->
			val a = div.selectFirst("a") ?: parseFailed()
			val href = a.relUrl("href")
			val title = div.selectFirst(".item-title")?.text() ?: parseFailed("Title not found")
			Manga(
				id = generateUid(href),
				title = title,
				altTitle = div.selectFirst(".item-alias")?.text()?.takeUnless { it == title },
				url = href,
				publicUrl = a.absUrl("href"),
				rating = Manga.NO_RATING,
				isNsfw = false,
				coverUrl = div.selectFirst("img[src]")?.absUrl("src").orEmpty(),
				largeCoverUrl = null,
				description = null,
				tags = div.selectFirst(".item-genre")?.parseTags().orEmpty(),
				state = null,
				author = null,
				source = source,
			)
		}
	}

	private fun Element.parseTags() = children().mapToSet { span ->
		val text = span.ownText()
		MangaTag(
			title = text.toTitleCase(),
			key = text.lowercase(Locale.ENGLISH).replace(' ', '_'),
			source = source,
		)
	}

	// Returns null for rows that are not chapters (no "a.chapt" link).
	private fun Element.parseChapter(index: Int): MangaChapter? {
		val a = selectFirst("a.chapt") ?: return null
		val extra = selectFirst(".extra")
		val href = a.relUrl("href")
		return MangaChapter(
			id = generateUid(href),
			name = a.text(),
			number = index + 1,
			url = href,
			scanlator = extra?.getElementsByAttributeValueContaining("href", "/group/")?.text(),
			// Dates are relative ("3 days ago"); fall back to 0 on any parse failure.
			uploadDate = runCatching {
				parseChapterDate(extra?.select("i")?.lastOrNull()?.ownText())
			}.getOrDefault(0),
			branch = null,
			source = source,
		)
	}

	// Converts a relative date string like "3 days ago" to epoch millis by
	// subtracting the amount from the current time. Unknown units yield 0.
	private fun parseChapterDate(date: String?): Long {
		if (date.isNullOrEmpty()) {
			return 0
		}
		val value = date.substringBefore(' ').toInt()
		val field = when {
			"sec" in date -> Calendar.SECOND
			"min" in date -> Calendar.MINUTE
			"hour" in date -> Calendar.HOUR
			"day" in date -> Calendar.DAY_OF_MONTH
			"week" in date -> Calendar.WEEK_OF_YEAR
			"month" in date -> Calendar.MONTH
			"year" in date -> Calendar.YEAR
			else -> return 0
		}
		val calendar = Calendar.getInstance()
		calendar.add(field, -value)
		return calendar.timeInMillis
	}

	/**
	 * Decrypts an OpenSSL-style AES-CBC payload: base64 input whose first 8
	 * bytes are the "Salted__" magic, followed by an 8-byte salt, then the
	 * ciphertext. Key/IV are derived from [password] and the salt.
	 */
	private fun decryptAES(encrypted: String, password: String): String {
		val cipherData = context.decodeBase64(encrypted)
		val saltData = cipherData.copyOfRange(8, 16)
		val (key, iv) = generateKeyAndIV(
			keyLength = 32,
			ivLength = 16,
			iterations = 1,
			salt = saltData,
			password = password.toByteArray(StandardCharsets.UTF_8),
			md = MessageDigest.getInstance("MD5"),
		)
		val encryptedData = cipherData.copyOfRange(16, cipherData.size)
		val cipher = Cipher.getInstance("AES/CBC/PKCS5Padding")
		cipher.init(Cipher.DECRYPT_MODE, key, iv)
		return cipher.doFinal(encryptedData).toString(Charsets.UTF_8)
	}

	/**
	 * Port of OpenSSL's EVP_BytesToKey key-derivation: repeatedly digests
	 * (previous digest + password + first 8 bytes of salt) until enough
	 * material exists for both the key and the IV.
	 */
	@Suppress("SameParameterValue")
	private fun generateKeyAndIV(
		keyLength: Int,
		ivLength: Int,
		iterations: Int,
		salt: ByteArray,
		password: ByteArray,
		md: MessageDigest,
	): Pair<SecretKeySpec, IvParameterSpec> {
		val digestLength = md.digestLength
		// Round the required material up to a whole number of digest blocks.
		val requiredLength = (keyLength + ivLength + digestLength - 1) / digestLength * digestLength
		val generatedData = ByteArray(requiredLength)
		var generatedLength = 0
		md.reset()
		while (generatedLength < keyLength + ivLength) {
			if (generatedLength > 0) {
				md.update(generatedData, generatedLength - digestLength, digestLength)
			}
			md.update(password)
			md.update(salt, 0, 8)
			md.digest(generatedData, generatedLength, digestLength)
			repeat(iterations - 1) {
				md.update(generatedData, generatedLength, digestLength)
				md.digest(generatedData, generatedLength, digestLength)
			}
			generatedLength += digestLength
		}

		return SecretKeySpec(generatedData.copyOfRange(0, keyLength), "AES") to IvParameterSpec(
			if (ivLength > 0) {
				generatedData.copyOfRange(keyLength, keyLength + ivLength)
			} else byteArrayOf(),
		)
	}
}
|
||||
@ -0,0 +1,159 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
/**
 * Base parser for the "-chan" family of sites (DLE-engine based), which share
 * markup and URL layout. Concrete subclasses supply the domain and source.
 */
internal abstract class ChanParser : MangaParser() {

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.NEWEST,
		SortOrder.POPULARITY,
		SortOrder.ALPHABETICAL,
	)

	override suspend fun getList(
		offset: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder?,
	): List<Manga> {
		val domain = getDomain()
		val url = when {
			!query.isNullOrEmpty() -> {
				// Search is not paginated; only the first page has results.
				if (offset != 0) {
					return emptyList()
				}
				"https://$domain/?do=search&subaction=search&story=${query.urlEncoded()}"
			}
			// NOTE(review): the resulting URL is ".../tags/a+b&n=...?offset=N" —
			// "&n=" before "?" looks malformed but may be what the site expects; confirm.
			!tags.isNullOrEmpty() -> tags.joinToString(
				prefix = "https://$domain/tags/",
				postfix = "&n=${getSortKey2(sortOrder)}?offset=$offset",
				separator = "+",
			) { tag -> tag.key }
			else -> "https://$domain/${getSortKey(sortOrder)}?offset=$offset"
		}
		val doc = context.httpGet(url).parseHtml()
		val root = doc.body().selectFirst("div.main_fon")?.getElementById("content")
			?: parseFailed("Cannot find root")
		// Each "content_row" is one manga card; rows without a title link are skipped.
		return root.select("div.content_row").mapNotNull { row ->
			val a = row.selectFirst("div.manga_row1")?.selectFirst("h2")?.selectFirst("a")
				?: return@mapNotNull null
			val href = a.relUrl("href")
			Manga(
				id = generateUid(href),
				url = href,
				publicUrl = href.inContextOf(a),
				altTitle = a.attr("title"),
				// Link text has the form "Alt name (Title)"; extract the parenthesised part.
				title = a.text().substringAfterLast('(').substringBeforeLast(')'),
				author = row.getElementsByAttributeValueStarting(
					"href",
					"/mangaka",
				).firstOrNull()?.text(),
				coverUrl = row.selectFirst("div.manga_images")?.selectFirst("img")
					?.absUrl("src").orEmpty(),
				// Tag parsing is best-effort: any failure yields an empty set.
				tags = runCatching {
					row.selectFirst("div.genre")?.select("a")?.mapToSet {
						MangaTag(
							title = it.text().toTagName(),
							key = it.attr("href").substringAfterLast('/').urlEncoded(),
							source = source,
						)
					}
				}.getOrNull().orEmpty(),
				source = source,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val doc = context.httpGet(manga.url.withDomain()).parseHtml()
		val root =
			doc.body().getElementById("dle-content") ?: parseFailed("Cannot find root")
		val dateFormat = SimpleDateFormat("yyyy-MM-dd", Locale.US)
		return manga.copy(
			// Drop the trailing "<div" block appended after the description text.
			description = root.getElementById("description")?.html()?.substringBeforeLast("<div"),
			largeCoverUrl = root.getElementById("cover")?.absUrl("src"),
			// Skip the two header rows (":gt(1)"); the table lists chapters newest-first.
			chapters = root.select("table.table_cha tr:gt(1)").reversed().mapIndexedNotNull { i, tr ->
				val href = tr?.selectFirst("a")?.relUrl("href") ?: return@mapIndexedNotNull null
				MangaChapter(
					id = generateUid(href),
					name = tr.selectFirst("a")?.text().orEmpty(),
					number = i + 1,
					url = href,
					scanlator = null,
					branch = null,
					uploadDate = dateFormat.tryParse(tr.selectFirst("div.date")?.text()),
					source = source,
				)
			},
		)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.withDomain()
		val doc = context.httpGet(fullUrl).parseHtml()
		val scripts = doc.select("script")
		// Page URLs live in an inline script as a JS array assigned to "fullimg".
		for (script in scripts) {
			val data = script.html()
			val pos = data.indexOf("\"fullimg")
			if (pos == -1) {
				continue
			}
			// Extract the raw contents between '[' and the closing ']'/';'.
			val json = data.substring(pos).substringAfter('[').substringBefore(';')
				.substringBeforeLast(']')
			val domain = getDomain()
			return json.split(",").mapNotNull {
				it.trim()
					.removeSurrounding('"', '\'')
					.toRelativeUrl(domain)
					.takeUnless(String::isBlank)
			}.map { url ->
				MangaPage(
					id = generateUid(url),
					url = url,
					preview = null,
					referer = fullUrl,
					source = source,
				)
			}
		}
		throw ParseException("Pages list not found at ${chapter.url}")
	}

	override suspend fun getTags(): Set<MangaTag> {
		val domain = getDomain()
		val doc = context.httpGet("https://$domain/catalog").parseHtml()
		// The tag cloud is the last <ul> in the sidebar.
		val root = doc.body().selectFirst("div.main_fon")?.getElementById("side")
			?.select("ul")?.last() ?: throw ParseException("Cannot find root")
		return root.select("li.sidetag").mapToSet { li ->
			val a = li.children().last() ?: throw ParseException("a is null")
			MangaTag(
				title = a.text().toTagName(),
				key = a.attr("href").substringAfterLast('/'),
				source = source,
			)
		}
	}

	// Maps a sort order to the listing path segment; defaults to the
	// lowest-ordinal supported order when none is given.
	private fun getSortKey(sortOrder: SortOrder?) =
		when (sortOrder ?: sortOrders.minByOrNull { it.ordinal }) {
			SortOrder.ALPHABETICAL -> "catalog"
			SortOrder.POPULARITY -> "mostfavorites"
			SortOrder.NEWEST -> "manga/new"
			else -> "mostfavorites"
		}

	// Same as getSortKey but for the "n=" parameter of tag listings.
	private fun getSortKey2(sortOrder: SortOrder?) =
		when (sortOrder ?: sortOrders.minByOrNull { it.ordinal }) {
			SortOrder.ALPHABETICAL -> "abcasc"
			SortOrder.POPULARITY -> "favdesc"
			SortOrder.NEWEST -> "datedesc"
			else -> "favdesc"
		}

	// Tag slugs use underscores; display names use spaces and Title Case.
	private fun String.toTagName() = replace('_', ' ').toTitleCase()
}
|
||||
@ -0,0 +1,216 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import androidx.collection.ArraySet
|
||||
import androidx.collection.SparseArrayCompat
|
||||
import org.json.JSONArray
|
||||
import org.json.JSONObject
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import org.koitharu.kotatsu.parsers.util.json.JSONIterator
|
||||
import org.koitharu.kotatsu.parsers.util.json.getStringOrNull
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSON
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSONToSet
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
/**
|
||||
* https://api.comick.fun/docs/static/index.html
|
||||
*/
|
||||
|
||||
// Listing page size and the (effectively unbounded) chapter request limit.
private const val PAGE_SIZE = 20
private const val CHAPTERS_LIMIT = 99999

/**
 * Parser for comick.fun, backed by its JSON REST API
 * (https://api.comick.fun/docs/static/index.html). Genre ids are resolved
 * through a lazily-fetched, cached id-to-tag map.
 */
internal class ComickFunParser(override val context: MangaLoaderContext) : MangaParser() {

	override val defaultDomain = "comick.fun"
	override val source = MangaSource.COMICK_FUN
	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.POPULARITY,
		SortOrder.UPDATED,
		SortOrder.RATING,
	)

	// Genre id -> tag cache; populated on first use. Concurrent first calls may
	// each fetch once, which is harmless (last write wins).
	@Volatile
	private var cachedTags: SparseArrayCompat<MangaTag>? = null

	override suspend fun getList(
		offset: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder?,
	): List<Manga> {
		val domain = getDomain()
		val url = buildString {
			append("https://api.")
			append(domain)
			append("/search?tachiyomi=true")
			if (!query.isNullOrEmpty()) {
				// Text search is not paginated by this endpoint.
				if (offset > 0) {
					return emptyList()
				}
				append("&q=")
				append(query.urlEncoded())
			} else {
				append("&limit=")
				append(PAGE_SIZE)
				append("&page=")
				append((offset / PAGE_SIZE) + 1)
				if (!tags.isNullOrEmpty()) {
					// The API takes repeated "genres=" parameters.
					append("&genres=")
					appendAll(tags, "&genres=", MangaTag::key)
				}
				append("&sort=") // view, uploaded, rating, follow, user_follow_count
				append(
					when (sortOrder) {
						SortOrder.POPULARITY -> "view"
						SortOrder.RATING -> "rating"
						else -> "uploaded"
					},
				)
			}
		}
		val ja = context.httpGet(url).parseJsonArray()
		val tagsMap = cachedTags ?: loadTags()
		return ja.mapJSON { jo ->
			val slug = jo.getString("slug")
			Manga(
				id = generateUid(slug),
				title = jo.getString("title"),
				altTitle = null,
				url = slug,
				publicUrl = "https://$domain/comic/$slug",
				// Rating is on a 0..10 scale; missing ratings become -1 after scaling.
				rating = jo.optDouble("rating", -10.0).toFloat() / 10f,
				isNsfw = false,
				coverUrl = jo.getString("cover_url"),
				largeCoverUrl = null,
				description = jo.getStringOrNull("desc"),
				tags = jo.selectGenres("genres", tagsMap),
				// Field may be absent in search results — treat any failure as unknown.
				state = runCatching {
					if (jo.getBoolean("translation_completed")) {
						MangaState.FINISHED
					} else {
						MangaState.ONGOING
					}
				}.getOrNull(),
				author = null,
				source = source,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val domain = getDomain()
		val url = "https://api.$domain/comic/${manga.url}?tachiyomi=true"
		val jo = context.httpGet(url).parseJson()
		val comic = jo.getJSONObject("comic")
		return manga.copy(
			title = comic.getString("title"),
			altTitle = null, // TODO
			isNsfw = jo.getBoolean("matureContent") || comic.getBoolean("hentai"),
			description = comic.getStringOrNull("parsed") ?: comic.getString("desc"),
			tags = manga.tags + jo.getJSONArray("genres").mapJSONToSet {
				MangaTag(
					title = it.getString("name"),
					key = it.getString("slug"),
					source = source,
				)
			},
			author = jo.getJSONArray("artists").optJSONObject(0)?.getString("name"),
			chapters = getChapters(comic.getLong("id")),
		)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val jo = context.httpGet(
			"https://api.${getDomain()}/chapter/${chapter.url}?tachiyomi=true",
		).parseJson().getJSONObject("chapter")
		val referer = "https://${getDomain()}/"
		return jo.getJSONArray("images").mapJSON {
			val url = it.getString("url")
			MangaPage(
				id = generateUid(url),
				url = url,
				referer = referer,
				preview = null,
				source = source,
			)
		}
	}

	override suspend fun getTags(): Set<MangaTag> {
		val sparseArray = cachedTags ?: loadTags()
		val set = ArraySet<MangaTag>(sparseArray.size())
		for (i in 0 until sparseArray.size()) {
			set.add(sparseArray.valueAt(i))
		}
		return set
	}

	/**
	 * Fetches the genre catalogue from the API, builds the id -> tag map,
	 * and stores it in [cachedTags].
	 */
	private suspend fun loadTags(): SparseArrayCompat<MangaTag> {
		val ja = context.httpGet("https://api.${getDomain()}/genre").parseJsonArray()
		val tags = SparseArrayCompat<MangaTag>(ja.length())
		for (jo in ja.JSONIterator()) {
			tags.append(
				jo.getInt("id"),
				MangaTag(
					title = jo.getString("name"),
					key = jo.getString("slug"),
					source = source,
				),
			)
		}
		cachedTags = tags
		return tags
	}

	/**
	 * Loads the full chapter list for a comic. Chapters arrive newest-first
	 * and are renumbered per translation language (branch).
	 */
	private suspend fun getChapters(id: Long): List<MangaChapter> {
		val ja = context.httpGet(
			url = "https://api.${getDomain()}/comic/$id/chapter?tachiyomi=true&limit=$CHAPTERS_LIMIT",
		).parseJson().getJSONArray("chapters")
		// FIX: pin the locale (as ChanParser does) — the default locale may use a
		// non-Gregorian calendar (e.g. Buddhist), silently mis-parsing dates.
		val dateFormat = SimpleDateFormat("yyyy-MM-dd", Locale.US)
		// Per-language running chapter counters.
		val counters = HashMap<Locale, Int>()
		return ja.mapReversed { jo ->
			val locale = Locale.forLanguageTag(jo.getString("lang"))
			var number = counters[locale] ?: 0
			number++
			counters[locale] = number
			MangaChapter(
				id = generateUid(jo.getLong("id")),
				name = buildString {
					jo.getStringOrNull("vol")?.let { append("Vol ").append(it).append(' ') }
					jo.getStringOrNull("chap")?.let { append("Chap ").append(it) }
					jo.getStringOrNull("title")?.let { append(": ").append(it) }
				},
				number = number,
				url = jo.getString("hid"),
				scanlator = jo.optJSONArray("group_name")?.optString(0),
				uploadDate = dateFormat.tryParse(jo.getString("created_at").substringBefore('T')),
				branch = locale.getDisplayName(locale).toTitleCase(locale),
				source = source,
			)
		}
	}

	// Maps a JSON array back-to-front, so newest-first input yields oldest-first output.
	private inline fun <R> JSONArray.mapReversed(block: (JSONObject) -> R): List<R> {
		val len = length()
		val destination = ArrayList<R>(len)
		for (i in (0 until len).reversed()) {
			val jo = getJSONObject(i)
			destination.add(block(jo))
		}
		return destination
	}

	// Resolves an array of numeric genre ids to tags via the cached map,
	// silently dropping unknown ids.
	private fun JSONObject.selectGenres(name: String, tags: SparseArrayCompat<MangaTag>): Set<MangaTag> {
		val array = optJSONArray(name) ?: return emptySet()
		val res = ArraySet<MangaTag>(array.length())
		for (i in 0 until array.length()) {
			val id = array.getInt(i)
			val tag = tags.get(id) ?: continue
			res.add(tag)
		}
		return res
	}
}
|
||||
@ -0,0 +1,152 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSON
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSONIndexed
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSONToSet
|
||||
import java.util.*
|
||||
|
||||
/**
 * Parser for the desu.me catalogue. Lists, details and pages are served by a
 * JSON API under `/manga/api/`; only the genre filter is scraped from HTML.
 */
internal class DesuMeParser(override val context: MangaLoaderContext) : MangaParser() {

	override val source = MangaSource.DESUME

	override val defaultDomain = "desu.me"

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.UPDATED,
		SortOrder.POPULARITY,
		SortOrder.NEWEST,
		SortOrder.ALPHABETICAL,
	)

	/**
	 * Loads one page (20 items) of the catalogue.
	 * Search results are not paginated by the API, so any non-zero [offset]
	 * combined with a [query] returns an empty list.
	 */
	override suspend fun getList(
		offset: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder?,
	): List<Manga> {
		if (query != null && offset != 0) {
			return emptyList()
		}
		val domain = getDomain()
		val url = buildString {
			append("https://")
			append(domain)
			append("/manga/api/?limit=20&order=")
			append(getSortKey(sortOrder))
			append("&page=")
			append((offset / 20) + 1)
			if (!tags.isNullOrEmpty()) {
				append("&genres=")
				appendAll(tags, ",") { it.key }
			}
			if (query != null) {
				append("&search=")
				// FIX: the query is raw user input and must be URL-encoded,
				// otherwise spaces and non-ASCII characters break the request.
				append(query.urlEncoded())
			}
		}
		val json = context.httpGet(url).parseJson().getJSONArray("response")
			?: throw ParseException("Invalid response")
		val total = json.length()
		val list = ArrayList<Manga>(total)
		for (i in 0 until total) {
			val jo = json.getJSONObject(i)
			val cover = jo.getJSONObject("image")
			val id = jo.getLong("id")
			list += Manga(
				url = "/manga/api/$id",
				publicUrl = jo.getString("url"),
				source = MangaSource.DESUME,
				title = jo.getString("russian"),
				altTitle = jo.getString("name"),
				coverUrl = cover.getString("preview"),
				largeCoverUrl = cover.getString("original"),
				state = when {
					jo.getInt("ongoing") == 1 -> MangaState.ONGOING
					else -> null
				},
				// NOTE(review): "score" looks like a 0..10 value; clamping it
				// into [0, 1] would pin almost every rating to 1. Confirm the
				// API scale — a `/ 10f` before coercing may be intended.
				rating = jo.getDouble("score").toFloat().coerceIn(0f, 1f),
				id = generateUid(id),
				description = jo.getString("description"),
			)
		}
		return list
	}

	/**
	 * Fetches full details and the chapter list for [manga].
	 * Chapters are reported newest-first by the API and reversed here.
	 */
	override suspend fun getDetails(manga: Manga): Manga {
		val url = manga.url.withDomain()
		val json = context.httpGet(url).parseJson().getJSONObject("response")
			?: throw ParseException("Invalid response")
		val baseChapterUrl = manga.url + "/chapter/"
		val chaptersList = json.getJSONObject("chapters").getJSONArray("list")
		val totalChapters = chaptersList.length()
		return manga.copy(
			tags = json.getJSONArray("genres").mapJSONToSet {
				MangaTag(
					key = it.getString("text"),
					title = it.getString("russian").toTitleCase(),
					source = manga.source,
				)
			},
			publicUrl = json.getString("url"),
			description = json.getString("description"),
			chapters = chaptersList.mapJSONIndexed { i, jo ->
				val chapterId = jo.getLong("id")
				val volChap = "Том " + jo.optString("vol", "0") + ". " + "Глава " + jo.optString("ch", "0")
				// The API returns the literal string "null" for a missing title.
				val title = jo.optString("title", "null").takeUnless { it == "null" }
				MangaChapter(
					id = generateUid(chapterId),
					source = manga.source,
					url = "$baseChapterUrl$chapterId",
					uploadDate = jo.getLong("date") * 1000, // seconds -> milliseconds
					name = if (title.isNullOrEmpty()) volChap else "$volChap: $title",
					number = totalChapters - i,
					scanlator = null,
					branch = null,
				)
			}.reversed(),
		)
	}

	/** Loads the page list of [chapter] from the chapter API endpoint. */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.withDomain()
		val json = context.httpGet(fullUrl)
			.parseJson()
			.getJSONObject("response") ?: throw ParseException("Invalid response")
		return json.getJSONObject("pages").getJSONArray("list").mapJSON { jo ->
			MangaPage(
				id = generateUid(jo.getLong("id")),
				referer = fullUrl,
				preview = null,
				source = chapter.source,
				url = jo.getString("img"),
			)
		}
	}

	/** Scrapes the genre filter from the catalogue HTML page. */
	override suspend fun getTags(): Set<MangaTag> {
		val doc = context.httpGet("https://${getDomain()}/manga/").parseHtml()
		val root = doc.body().getElementById("animeFilter")
			?.selectFirst(".catalog-genres") ?: throw ParseException("Root not found")
		return root.select("li").mapToSet {
			MangaTag(
				source = source,
				key = it.selectFirst("input")?.attr("data-genre") ?: parseFailed(),
				title = it.selectFirst("label")?.text()?.toTitleCase() ?: parseFailed(),
			)
		}
	}

	// Maps a sort order to the API's `order` query value; defaults to "updated".
	private fun getSortKey(sortOrder: SortOrder?) =
		when (sortOrder) {
			SortOrder.ALPHABETICAL -> "name"
			SortOrder.POPULARITY -> "popular"
			SortOrder.UPDATED -> "updated"
			SortOrder.NEWEST -> "id"
			else -> "updated"
		}
}
|
||||
@ -0,0 +1,280 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import org.jsoup.nodes.Element
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.MangaParserAuthProvider
|
||||
import org.koitharu.kotatsu.parsers.exception.AuthRequiredException
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.util.*
|
||||
import kotlin.math.pow
|
||||
|
||||
private const val DOMAIN_UNAUTHORIZED = "e-hentai.org"
private const val DOMAIN_AUTHORIZED = "exhentai.org"

/**
 * Parser for e-hentai.org / exhentai.org. The authorized domain is used as
 * soon as the forum auth cookies are present; they are mirrored from the
 * public domain on demand.
 */
internal class ExHentaiParser(override val context: MangaLoaderContext) : MangaParser(), MangaParserAuthProvider {

	override val source = MangaSource.EXHENTAI

	override val sortOrders: Set<SortOrder> = Collections.singleton(
		SortOrder.NEWEST,
	)

	override val defaultDomain: String
		get() = if (isAuthorized) DOMAIN_AUTHORIZED else DOMAIN_UNAUTHORIZED

	override val authUrl: String
		get() = "https://${getDomain()}/bounce_login.php"

	// Matches the pixel offsets of the rating sprite, e.g. "-16px". No capture
	// groups on purpose — parseRating() reads whole matches.
	private val ratingPattern = Regex("-?[0-9]+px")
	private val authCookies = arrayOf("ipb_member_id", "ipb_pass_hash")
	// Set when the site ignored the "sl=dm_2" cookie and the list request must
	// be retried with an explicit display-mode parameter. See getList().
	private var updateDm = false

	override val isAuthorized: Boolean
		get() {
			val authorized = isAuthorized(DOMAIN_UNAUTHORIZED)
			if (authorized) {
				// Mirror the auth cookies onto the members-only domain once.
				if (!isAuthorized(DOMAIN_AUTHORIZED)) {
					context.cookieJar.copyCookies(
						DOMAIN_UNAUTHORIZED,
						DOMAIN_AUTHORIZED,
						authCookies,
					)
					context.cookieJar.insertCookies(DOMAIN_AUTHORIZED, "yay=louder")
				}
				return true
			}
			return false
		}

	init {
		// "nw=1" skips the content warning, "sl=dm_2" selects the list layout.
		context.cookieJar.insertCookies(DOMAIN_AUTHORIZED, "nw=1", "sl=dm_2")
		context.cookieJar.insertCookies(DOMAIN_UNAUTHORIZED, "nw=1", "sl=dm_2")
	}

	/**
	 * Loads one catalogue page. Numeric tag keys are folded into the category
	 * bitmask (`f_cats` is an exclusion mask, hence `1023 - fCats`); all other
	 * tag keys are appended to the text search.
	 */
	override suspend fun getList(
		offset: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder?,
	): List<Manga> {
		val page = (offset / 25f).toIntUp()
		var search = query?.urlEncoded().orEmpty()
		val url = buildString {
			append("https://")
			append(getDomain())
			append("/?page=")
			append(page)
			if (!tags.isNullOrEmpty()) {
				var fCats = 0
				for (tag in tags) {
					tag.key.toIntOrNull()?.let { fCats = fCats or it } ?: run {
						search += tag.key + " "
					}
				}
				if (fCats != 0) {
					append("&f_cats=")
					append(1023 - fCats)
				}
			}
			if (search.isNotEmpty()) {
				append("&f_search=")
				append(search.trim().replace(' ', '+'))
			}
			// by unknown reason cookie "sl=dm_2" is ignored, so, we should request it again
			if (updateDm) {
				append("&inline_set=dm_e")
			}
		}
		val body = context.httpGet(url).parseHtml().body()
		val root = body.selectFirst("table.itg")
			?.selectFirst("tbody")
			?: if (updateDm) {
				parseFailed("Cannot find root")
			} else {
				// Retry once with the explicit display-mode parameter.
				updateDm = true
				return getList(offset, query, tags, sortOrder)
			}
		updateDm = false
		return root.children().mapNotNull { tr ->
			if (tr.childrenSize() != 2) return@mapNotNull null
			val (td1, td2) = tr.children()
			val glink = td2.selectFirst("div.glink") ?: parseFailed("glink not found")
			val a = glink.parents().select("a").first() ?: parseFailed("link not found")
			val href = a.relUrl("href")
			val tagsDiv = glink.nextElementSibling() ?: parseFailed("tags div not found")
			val mainTag = td2.selectFirst("div.cn")?.let { div ->
				MangaTag(
					title = div.text().toTitleCase(),
					key = tagIdByClass(div.classNames()) ?: return@let null,
					source = source,
				)
			}
			Manga(
				id = generateUid(href),
				title = glink.text().cleanupTitle(),
				altTitle = null,
				url = href,
				publicUrl = a.absUrl("href"),
				rating = td2.selectFirst("div.ir")?.parseRating() ?: Manga.NO_RATING,
				isNsfw = true,
				coverUrl = td1.selectFirst("img")?.absUrl("src").orEmpty(),
				tags = setOfNotNull(mainTag),
				state = null,
				author = tagsDiv.getElementsContainingOwnText("artist:").first()
					?.nextElementSibling()?.text(),
				source = source,
			)
		}
	}

	/**
	 * Loads gallery details. A gallery has no real chapters; each pager page
	 * of thumbnails is exposed as a synthetic chapter.
	 */
	override suspend fun getDetails(manga: Manga): Manga {
		val doc = context.httpGet(manga.url.withDomain()).parseHtml()
		val root = doc.body().selectFirst("div.gm") ?: parseFailed("Cannot find root")
		val cover = root.getElementById("gd1")?.children()?.first()
		val title = root.getElementById("gd2")
		val taglist = root.getElementById("taglist")
		val tabs = doc.body().selectFirst("table.ptt")?.selectFirst("tr")
		return manga.copy(
			title = title?.getElementById("gn")?.text()?.cleanupTitle() ?: manga.title,
			altTitle = title?.getElementById("gj")?.text()?.cleanupTitle() ?: manga.altTitle,
			publicUrl = doc.baseUri().ifEmpty { manga.publicUrl },
			rating = root.getElementById("rating_label")?.text()
				?.substringAfterLast(' ')
				?.toFloatOrNull()
				?.div(5f) ?: manga.rating,
			largeCoverUrl = cover?.css("background")?.cssUrl(),
			// The tag table is rendered as an HTML description.
			description = taglist?.select("tr")?.joinToString("<br>") { tr ->
				val (tc, td) = tr.children()
				val subtags = td.select("a").joinToString { it.html() }
				"<b>${tc.html()}</b> $subtags"
			},
			// The last numeric pager entry is the total page count.
			chapters = tabs?.select("a")?.findLast { a ->
				a.text().toIntOrNull() != null
			}?.let { a ->
				val count = a.text().toInt()
				val chapters = ArrayList<MangaChapter>(count)
				for (i in 1..count) {
					val url = "${manga.url}?p=$i"
					chapters += MangaChapter(
						id = generateUid(url),
						name = "${manga.title} #$i",
						number = i,
						url = url,
						uploadDate = 0L,
						source = source,
						scanlator = null,
						branch = null,
					)
				}
				chapters
			},
		)
	}

	/** Collects the thumbnail links of one pager page as manga pages. */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val doc = context.httpGet(chapter.url.withDomain()).parseHtml()
		val root = doc.body().getElementById("gdt") ?: parseFailed("Root not found")
		return root.select("a").mapNotNull { a ->
			val url = a.relUrl("href")
			MangaPage(
				id = generateUid(url),
				url = url,
				referer = a.absUrl("href"),
				preview = null,
				source = source,
			)
		}
	}

	/** Resolves the full-size image URL of a single page. */
	override suspend fun getPageUrl(page: MangaPage): String {
		val doc = context.httpGet(page.url.withDomain()).parseHtml()
		return doc.body().getElementById("img")?.absUrl("src")
			?: parseFailed("Image not found")
	}

	/** Reads the category checkboxes of the front-page search box as tags. */
	override suspend fun getTags(): Set<MangaTag> {
		val doc = context.httpGet("https://${getDomain()}").parseHtml()
		val root = doc.body().getElementById("searchbox")?.selectFirst("table")
			?: parseFailed("Root not found")
		return root.select("div.cs").mapNotNullToSet { div ->
			val id = div.id().substringAfterLast('_').toIntOrNull()
				?: return@mapNotNullToSet null
			MangaTag(
				title = div.text().toTitleCase(),
				key = id.toString(),
				source = source,
			)
		}
	}

	/**
	 * Reads the signed-in user name from the forum header.
	 * @throws AuthRequiredException when the guest header is shown instead.
	 */
	override suspend fun getUsername(): String {
		val doc = context.httpGet("https://forums.$DOMAIN_UNAUTHORIZED/").parseHtml().body()
		val username = doc.getElementById("userlinks")
			?.getElementsByAttributeValueContaining("href", "?showuser=")
			?.firstOrNull()
			?.ownText()
			?: if (doc.getElementById("userlinksguest") != null) {
				throw AuthRequiredException(source)
			} else {
				throw ParseException()
			}
		return username
	}

	// True when both forum auth cookies are present for the given domain.
	private fun isAuthorized(domain: String): Boolean {
		val cookies = context.cookieJar.getCookies(domain).mapToSet { x -> x.name }
		return authCookies.all { it in cookies }
	}

	/**
	 * Decodes the star rating from the sprite's two background pixel offsets.
	 * FIX: [ratingPattern] has no capture groups, so the previous
	 * `find(...)!!.destructured` always threw (destructuring reads group 1)
	 * and the surrounding runCatching turned every rating into NO_RATING.
	 * We now take the first two whole matches instead.
	 */
	private fun Element.parseRating(): Float {
		return runCatching {
			val style = requireNotNull(attr("style"))
			val offsets = ratingPattern.findAll(style).take(2).toList()
			var p1 = offsets[0].value.dropLast(2).toInt()
			val p2 = offsets[1].value.dropLast(2).toInt()
			// A second offset of -1 marks a half star.
			if (p2 != -1) {
				p1 += 8
			}
			(80 - p1) / 80f
		}.getOrDefault(Manga.NO_RATING)
	}

	// Strips every "[...]" group and surrounding whitespace from a title.
	private fun String.cleanupTitle(): String {
		val result = StringBuilder(length)
		var skip = false
		for (c in this) {
			when {
				c == '[' -> skip = true
				c == ']' -> skip = false
				c.isWhitespace() && result.isEmpty() -> continue
				!skip -> result.append(c)
			}
		}
		while (result.lastOrNull()?.isWhitespace() == true) {
			result.deleteCharAt(result.lastIndex)
		}
		return result.toString()
	}

	// Extracts the value inside a CSS "url(...)" token, or null if absent.
	private fun String.cssUrl(): String? {
		val fromIndex = indexOf("url(")
		if (fromIndex == -1) {
			return null
		}
		val toIndex = indexOf(')', startIndex = fromIndex)
		return if (toIndex == -1) {
			null
		} else {
			substring(fromIndex + 4, toIndex).trim()
		}
	}

	// Converts a "ctN" css class (N = hex digit) into the category bit value.
	private fun tagIdByClass(classNames: Collection<String>): String? {
		val className = classNames.find { x -> x.startsWith("ct") } ?: return null
		val num = className.drop(2).toIntOrNull(16) ?: return null
		return 2.0.pow(num).toInt().toString()
	}
}
|
||||
@ -0,0 +1,248 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import okhttp3.Headers
|
||||
import okhttp3.HttpUrl.Companion.toHttpUrl
|
||||
import okhttp3.Response
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
// Catalogue/listing page size and search page size of the Grouple engine.
private const val PAGE_SIZE = 70
private const val PAGE_SIZE_SEARCH = 50
// Marker text of the adult-content warning banner on a title page.
private const val NSFW_ALERT = "сексуальные сцены"

/**
 * Base parser for sites running the Grouple engine (readmanga, mintmanga, …).
 * Subclasses only provide the source and domain.
 */
internal abstract class GroupleParser : MangaParser() {

	private val headers = Headers.Builder()
		.add("User-Agent", "readmangafun")
		.build()

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.UPDATED,
		SortOrder.POPULARITY,
		SortOrder.NEWEST,
		SortOrder.RATING,
	)

	/**
	 * Loads one catalogue page. The endpoint depends on the filter:
	 * text search -> POST /search, single genre -> /list/genre/…,
	 * multiple genres -> the advanced-search form, otherwise plain /list.
	 */
	override suspend fun getList(
		offset: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder?,
	): List<Manga> {
		val domain = getDomain()
		val doc = when {
			!query.isNullOrEmpty() -> context.httpPost(
				"https://$domain/search",
				mapOf(
					"q" to query.urlEncoded(),
					"offset" to (offset upBy PAGE_SIZE_SEARCH).toString(),
				),
			)
			tags.isNullOrEmpty() -> context.httpGet(
				"https://$domain/list?sortType=${
					getSortKey(
						sortOrder,
					)
				}&offset=${offset upBy PAGE_SIZE}",
				headers,
			)
			tags.size == 1 -> context.httpGet(
				"https://$domain/list/genre/${tags.first().key}?sortType=${
					getSortKey(
						sortOrder,
					)
				}&offset=${offset upBy PAGE_SIZE}",
				headers,
			)
			offset > 0 -> return emptyList() // advanced search is not paginated
			else -> advancedSearch(domain, tags)
		}.parseHtml().body()
		val root = (doc.getElementById("mangaBox") ?: doc.getElementById("mangaResults"))
			?.selectFirst("div.tiles.row") ?: throw ParseException("Cannot find root")
		val baseHost = root.baseUri().toHttpUrl().host
		return root.select("div.tile").mapNotNull { node ->
			val imgDiv = node.selectFirst("div.img") ?: return@mapNotNull null
			val descDiv = node.selectFirst("div.desc") ?: return@mapNotNull null
			if (descDiv.selectFirst("i.fa-user") != null) {
				return@mapNotNull null // skip author
			}
			val href = imgDiv.selectFirst("a")?.attr("href")?.inContextOf(node)
			if (href == null || href.toHttpUrl().host != baseHost) {
				return@mapNotNull null // skip external links
			}
			val title = descDiv.selectFirst("h3")?.selectFirst("a")?.text()
				?: return@mapNotNull null
			val tileInfo = descDiv.selectFirst("div.tile-info")
			val relUrl = href.toRelativeUrl(baseHost)
			Manga(
				id = generateUid(relUrl),
				url = relUrl,
				publicUrl = href,
				title = title,
				altTitle = descDiv.selectFirst("h4")?.text(),
				coverUrl = imgDiv.selectFirst("img.lazy")?.attr("data-original").orEmpty(),
				// Rating tooltip looks like "8.5 из 10"; stored as 0..1.
				rating = runCatching {
					node.selectFirst("div.rating")
						?.attr("title")
						?.substringBefore(' ')
						?.toFloatOrNull()
						?.div(10f)
				}.getOrNull() ?: Manga.NO_RATING,
				author = tileInfo?.selectFirst("a.person-link")?.text(),
				tags = runCatching {
					tileInfo?.select("a.element-link")
						?.mapToSet {
							MangaTag(
								title = it.text().toTitleCase(),
								key = it.attr("href").substringAfterLast('/'),
								source = source,
							)
						}
				}.getOrNull().orEmpty(),
				state = when {
					node.selectFirst("div.tags")
						?.selectFirst("span.mangaCompleted") != null -> MangaState.FINISHED
					else -> null
				},
				source = source,
			)
		}
	}

	/** Loads the title page of [manga]: description, covers, tags and chapters. */
	override suspend fun getDetails(manga: Manga): Manga {
		val doc = context.httpGet(manga.url.withDomain(), headers).parseHtml()
		val root = doc.body().getElementById("mangaBox")?.selectFirst("div.leftContent")
			?: throw ParseException("Cannot find root")
		val dateFormat = SimpleDateFormat("dd.MM.yy", Locale.US)
		val coverImg = root.selectFirst("div.subject-cover")?.selectFirst("img")
		return manga.copy(
			description = root.selectFirst("div.manga-description")?.html(),
			largeCoverUrl = coverImg?.attr("data-full"),
			coverUrl = coverImg?.attr("data-thumb") ?: manga.coverUrl,
			tags = manga.tags + root.select("div.subject-meta").select("span.elem_genre ")
				.mapNotNull {
					val a = it.selectFirst("a.element-link") ?: return@mapNotNull null
					MangaTag(
						title = a.text().toTitleCase(),
						key = a.attr("href").substringAfterLast('/'),
						source = source,
					)
				},
			isNsfw = root.select(".alert-warning").any { it.ownText().contains(NSFW_ALERT) },
			chapters = root.selectFirst("div.chapters-link")?.selectFirst("table")
				?.select("tr:has(td > a)")?.asReversed()?.mapIndexedNotNull { i, tr ->
					val a = tr.selectFirst("a") ?: return@mapIndexedNotNull null
					val href = a.relUrl("href")
					var translators = ""
					val translatorElement = a.attr("title")
					// attr() never returns null, only an empty string.
					if (!translatorElement.isBlank()) {
						translators = translatorElement
							.replace("(Переводчик),", "&")
							.removeSuffix(" (Переводчик)")
					}
					MangaChapter(
						id = generateUid(href),
						// FIX: reuse the already-selected (and null-checked)
						// link instead of re-querying the row.
						name = a.text().removePrefix(manga.title).trim(),
						number = i + 1,
						url = href,
						uploadDate = dateFormat.tryParse(tr.selectFirst("td.d-none")?.text()),
						scanlator = translators,
						source = source,
						branch = null,
					)
				},
		)
	}

	/**
	 * Extracts the page list from the inline `rm_h.init([...])` script of the
	 * reader page. Each entry is a JS array whose first and third strings form
	 * the image URL.
	 */
	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val doc = context.httpGet(chapter.url.withDomain() + "?mtr=1", headers).parseHtml()
		val scripts = doc.select("script")
		for (script in scripts) {
			val data = script.html()
			val pos = data.indexOf("rm_h.init")
			if (pos == -1) {
				continue
			}
			val json = data.substring(pos).substringAfter('[').substringBeforeLast(']')
			val matches = Regex("\\[.*?]").findAll(json).toList()
			val regex = Regex("['\"].*?['\"]")
			return matches.map { x ->
				val parts = regex.findAll(x.value).toList()
				val url = parts[0].value.removeSurrounding('"', '\'') +
					parts[2].value.removeSurrounding('"', '\'')
				MangaPage(
					id = generateUid(url),
					url = url,
					preview = null,
					referer = chapter.url,
					source = source,
				)
			}
		}
		throw ParseException("Pages list not found at ${chapter.url}")
	}

	/** Scrapes the full genre index from the /list/genres page. */
	override suspend fun getTags(): Set<MangaTag> {
		val doc = context.httpGet("https://${getDomain()}/list/genres/sort_name", headers).parseHtml()
		val root = doc.body().getElementById("mangaBox")?.selectFirst("div.leftContent")
			?.selectFirst("table.table") ?: parseFailed("Cannot find root")
		return root.select("a.element-link").mapToSet { a ->
			MangaTag(
				title = a.text().toTitleCase(),
				key = a.attr("href").substringAfterLast('/'),
				source = source,
			)
		}
	}

	// Maps a sort order (or the first supported one) to the `sortType` value.
	private fun getSortKey(sortOrder: SortOrder?) =
		when (sortOrder ?: sortOrders.minByOrNull { it.ordinal }) {
			SortOrder.ALPHABETICAL -> "name"
			SortOrder.POPULARITY -> "rate"
			SortOrder.UPDATED -> "updated"
			SortOrder.NEWEST -> "created"
			SortOrder.RATING -> "votes"
			null -> "updated"
		}

	/**
	 * Performs a multi-genre search through the advanced-search form:
	 * first resolves catalogue genre titles to form input ids, then POSTs the
	 * assembled form.
	 */
	private suspend fun advancedSearch(domain: String, tags: Set<MangaTag>): Response {
		val url = "https://$domain/search/advanced"
		// Step 1: map catalog genres names to advanced-search genres ids
		val tagsIndex = context.httpGet(url, headers).parseHtml()
			.body().selectFirst("form.search-form")
			?.select("div.form-group")
			?.get(1) ?: parseFailed("Genres filter element not found")
		val tagNames = tags.map { it.title.lowercase() }
		val payload = HashMap<String, String>()
		var foundGenres = 0
		tagsIndex.select("li.property").forEach { li ->
			val name = li.text().trim().lowercase()
			val id = li.selectFirst("input")?.id()
				?: parseFailed("Id for tag $name not found")
			payload[id] = if (name in tagNames) {
				foundGenres++
				"in"
			} else ""
		}
		if (foundGenres != tags.size) {
			parseFailed("Some genres are not found")
		}
		// Step 2: advanced search
		payload["q"] = ""
		payload["s_high_rate"] = ""
		payload["s_single"] = ""
		payload["s_mature"] = ""
		payload["s_completed"] = ""
		payload["s_translated"] = ""
		payload["s_many_chapters"] = ""
		payload["s_wait_upload"] = ""
		payload["s_sale"] = ""
		payload["years"] = "1900,2099"
		payload["+"] = "Искать".urlEncoded()
		return context.httpPost(url, payload)
	}
}
|
||||
@ -0,0 +1,59 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.mapToSet
|
||||
import org.koitharu.kotatsu.parsers.util.parseHtml
|
||||
import org.koitharu.kotatsu.parsers.util.toTitleCase
|
||||
|
||||
/**
 * hentaichan.live parser. Reuses [ChanParser] for listing and pages, only
 * post-processing the list items and providing a custom details page.
 */
internal class HenChanParser(override val context: MangaLoaderContext) : ChanParser() {

	override val defaultDomain = "hentaichan.live"
	override val source = MangaSource.HENCHAN

	override suspend fun getList(
		offset: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder?,
	): List<Manga> {
		val baseList = super.getList(offset, query, tags, sortOrder)
		return baseList.map { manga ->
			manga.copy(
				// Covers come blurred by default; strip the suffix for the clean image.
				coverUrl = manga.coverUrl.replace("_blur", ""),
				isNsfw = true,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val page = context.httpGet(manga.url.withDomain()).parseHtml()
		val content = page.body().getElementById("dle-content")
			?: throw ParseException("Cannot find root")
		// The site has no real chapter list: everything is read through a
		// single "online" page derived from the manga URL.
		val readerUrl = manga.url.replace("manga", "online")
		val singleChapter = MangaChapter(
			id = generateUid(readerUrl),
			url = readerUrl,
			source = source,
			number = 1,
			uploadDate = 0L,
			name = manga.title,
			scanlator = null,
			branch = null,
		)
		return manga.copy(
			description = content.getElementById("description")?.html()?.substringBeforeLast("<div"),
			largeCoverUrl = content.getElementById("cover")?.absUrl("src"),
			tags = content.selectFirst("div.sidetags")?.select("li.sidetag")?.mapToSet { li ->
				val link = li.children().last() ?: parseFailed("Invalid tag")
				MangaTag(
					title = link.text().toTitleCase(),
					key = link.attr("href").substringAfterLast('/'),
					source = source,
				)
			} ?: manga.tags,
			chapters = listOf(singleChapter),
		)
	}
}
|
||||
@ -0,0 +1,14 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
|
||||
// hentailib.me parser: a MangaLib engine clone where every title is adult-only,
// so the NSFW check is a constant `true` regardless of the document content.
internal class HentaiLibParser(context: MangaLoaderContext) : MangaLibParser(context) {

	override val defaultDomain = "hentailib.me"

	override val source = MangaSource.HENTAILIB

	// Entire catalogue is NSFW; the page is never inspected.
	override fun isNsfw(doc: Document) = true
}
|
||||
@ -0,0 +1,10 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
|
||||
// manga-chan.me parser: plain ChanParser with no behavior overrides — only the
// source identity and domain differ from the base implementation.
internal class MangaChanParser(override val context: MangaLoaderContext) : ChanParser() {

	override val defaultDomain = "manga-chan.me"
	override val source = MangaSource.MANGACHAN
}
|
||||
@ -0,0 +1,212 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.coroutineScope
|
||||
import org.json.JSONObject
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import org.koitharu.kotatsu.parsers.util.json.*
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
// Page size used for the catalogue listing requests.
private const val PAGE_SIZE = 20
// Query fragment requesting every content rating from the API; NSFW flagging
// is then done per-item from the returned "contentRating" attribute.
private const val CONTENT_RATING =
	"contentRating[]=safe&contentRating[]=suggestive&contentRating[]=erotica&contentRating[]=pornographic"
// NOTE(review): not referenced in the visible part of this file — presumably
// used by a locale-selection helper further down; confirm before removing.
private const val LOCALE_FALLBACK = "en"
|
||||
|
||||
internal class MangaDexParser(override val context: MangaLoaderContext) : MangaParser() {
|
||||
|
||||
override val source = MangaSource.MANGADEX
|
||||
override val defaultDomain = "mangadex.org"
|
||||
|
||||
override val sortOrders: EnumSet<SortOrder> = EnumSet.of(
|
||||
SortOrder.UPDATED,
|
||||
SortOrder.ALPHABETICAL,
|
||||
SortOrder.NEWEST,
|
||||
SortOrder.POPULARITY,
|
||||
)
|
||||
|
||||
override suspend fun getList(
|
||||
offset: Int,
|
||||
query: String?,
|
||||
tags: Set<MangaTag>?,
|
||||
sortOrder: SortOrder?,
|
||||
): List<Manga> {
|
||||
val domain = getDomain()
|
||||
val url = buildString {
|
||||
append("https://api.")
|
||||
append(domain)
|
||||
append("/manga?limit=")
|
||||
append(PAGE_SIZE)
|
||||
append("&offset=")
|
||||
append(offset)
|
||||
append("&includes[]=cover_art&includes[]=author&includes[]=artist&")
|
||||
tags?.forEach { tag ->
|
||||
append("includedTags[]=")
|
||||
append(tag.key)
|
||||
append('&')
|
||||
}
|
||||
if (!query.isNullOrEmpty()) {
|
||||
append("title=")
|
||||
append(query.urlEncoded())
|
||||
append('&')
|
||||
}
|
||||
append(CONTENT_RATING)
|
||||
append("&order")
|
||||
append(
|
||||
when (sortOrder) {
|
||||
null,
|
||||
SortOrder.UPDATED,
|
||||
-> "[latestUploadedChapter]=desc"
|
||||
SortOrder.ALPHABETICAL -> "[title]=asc"
|
||||
SortOrder.NEWEST -> "[createdAt]=desc"
|
||||
SortOrder.POPULARITY -> "[followedCount]=desc"
|
||||
else -> "[followedCount]=desc"
|
||||
},
|
||||
)
|
||||
}
|
||||
val json = context.httpGet(url).parseJson().getJSONArray("data")
|
||||
return json.mapJSON { jo ->
|
||||
val id = jo.getString("id")
|
||||
val attrs = jo.getJSONObject("attributes")
|
||||
val relations = jo.getJSONArray("relationships").associateByKey("type")
|
||||
val cover = relations["cover_art"]
|
||||
?.getJSONObject("attributes")
|
||||
?.getString("fileName")
|
||||
?.let {
|
||||
"https://uploads.$domain/covers/$id/$it"
|
||||
}
|
||||
Manga(
|
||||
id = generateUid(id),
|
||||
title = requireNotNull(attrs.getJSONObject("title").selectByLocale()) {
|
||||
"Title should not be null"
|
||||
},
|
||||
altTitle = attrs.optJSONObject("altTitles")?.selectByLocale(),
|
||||
url = id,
|
||||
publicUrl = "https://$domain/title/$id",
|
||||
rating = Manga.NO_RATING,
|
||||
isNsfw = attrs.getStringOrNull("contentRating") == "erotica",
|
||||
coverUrl = cover?.plus(".256.jpg").orEmpty(),
|
||||
largeCoverUrl = cover,
|
||||
description = attrs.optJSONObject("description")?.selectByLocale(),
|
||||
tags = attrs.getJSONArray("tags").mapJSONToSet { tag ->
|
||||
MangaTag(
|
||||
title = tag.getJSONObject("attributes")
|
||||
.getJSONObject("name")
|
||||
.firstStringValue()
|
||||
.toTitleCase(),
|
||||
key = tag.getString("id"),
|
||||
source = source,
|
||||
)
|
||||
},
|
||||
state = when (jo.getStringOrNull("status")) {
|
||||
"ongoing" -> MangaState.ONGOING
|
||||
"completed" -> MangaState.FINISHED
|
||||
else -> null
|
||||
},
|
||||
author = (relations["author"] ?: relations["artist"])
|
||||
?.getJSONObject("attributes")
|
||||
?.getStringOrNull("name"),
|
||||
source = source,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
	/**
	 * Loads details and the chapter feed for [manga] in parallel: one request
	 * for the manga attributes, one for the chapter feed. Chapters hosted
	 * externally (non-null "externalUrl") are skipped.
	 */
	override suspend fun getDetails(manga: Manga): Manga = coroutineScope<Manga> {
		val domain = getDomain()
		// Manga attributes (description etc.), fetched concurrently with the feed.
		val attrsDeferred = async {
			context.httpGet(
				"https://api.$domain/manga/${manga.url}?includes[]=artist&includes[]=author&includes[]=cover_art",
			).parseJson().getJSONObject("data").getJSONObject("attributes")
		}
		// Chapter feed, ordered by volume then chapter ascending.
		val feedDeferred = async {
			val url = buildString {
				append("https://api.")
				append(domain)
				append("/manga/")
				append(manga.url)
				append("/feed")
				append("?limit=96&includes[]=scanlation_group&order[volume]=asc&order[chapter]=asc&offset=0&")
				append(CONTENT_RATING)
			}
			context.httpGet(url).parseJson().getJSONArray("data")
		}
		val mangaAttrs = attrsDeferred.await()
		val feed = feedDeferred.await()
		// Timestamps look like: 2022-01-02T00:27:11+00:00
		val dateFormat = SimpleDateFormat(
			"yyyy-MM-dd'T'HH:mm:ss'+00:00'",
			Locale.ROOT,
		)
		manga.copy(
			description = mangaAttrs.getJSONObject("description").selectByLocale()
				?: manga.description,
			chapters = feed.mapJSONNotNull { jo ->
				val id = jo.getString("id")
				val attrs = jo.getJSONObject("attributes")
				// Skip chapters that are only available on an external site.
				if (!attrs.isNull("externalUrl")) {
					return@mapJSONNotNull null
				}
				val locale = Locale.forLanguageTag(attrs.getString("translatedLanguage"))
				val relations = jo.getJSONArray("relationships").associateByKey("type")
				val number = attrs.optInt("chapter", 0)
				MangaChapter(
					id = generateUid(id),
					name = attrs.getStringOrNull("title")?.takeUnless(String::isEmpty)
						?: "Chapter #$number",
					number = number,
					url = id,
					scanlator = relations["scanlation_group"]?.getStringOrNull("name"),
					uploadDate = dateFormat.tryParse(attrs.getString("publishAt")),
					// Branch label is the translation language's display name.
					branch = locale.getDisplayName(locale).toTitleCase(locale),
					source = source,
				)
			},
		)
	}
|
||||
|
||||
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
|
||||
val domain = getDomain()
|
||||
val chapter = context.httpGet("https://api.$domain/at-home/server/${chapter.url}?forcePort443=false")
|
||||
.parseJson()
|
||||
.getJSONObject("chapter")
|
||||
val pages = chapter.getJSONArray("data")
|
||||
val prefix = "https://uploads.$domain/data/${chapter.getString("hash")}/"
|
||||
val referer = "https://$domain/"
|
||||
return List(pages.length()) { i ->
|
||||
val url = prefix + pages.getString(i)
|
||||
MangaPage(
|
||||
id = generateUid(url),
|
||||
url = url,
|
||||
referer = referer,
|
||||
preview = null, // TODO prefix + dataSaver.getString(i),
|
||||
source = source,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
override suspend fun getTags(): Set<MangaTag> {
|
||||
val tags = context.httpGet("https://api.${getDomain()}/manga/tag").parseJson()
|
||||
.getJSONArray("data")
|
||||
return tags.mapJSONToSet { jo ->
|
||||
MangaTag(
|
||||
title = jo.getJSONObject("attributes").getJSONObject("name").firstStringValue().toTitleCase(),
|
||||
key = jo.getString("id"),
|
||||
source = source,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
	// Returns the first value of this JSON object cast to String — used for localized
	// name maps like {"en": "..."}. Throws if the object is empty or the value is not a string.
	private fun JSONObject.firstStringValue() = values().next() as String
|
||||
|
||||
private fun JSONObject.selectByLocale(): String? {
|
||||
val preferredLocales = context.getPreferredLocales()
|
||||
for (locale in preferredLocales) {
|
||||
getStringOrNull(locale.language)?.let { return it }
|
||||
getStringOrNull(locale.toLanguageTag())?.let { return it }
|
||||
}
|
||||
return getStringOrNull(LOCALE_FALLBACK) ?: values().nextOrNull() as? String
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,277 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import androidx.collection.ArraySet
|
||||
import org.json.JSONArray
|
||||
import org.json.JSONObject
|
||||
import org.jsoup.nodes.Document
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.MangaParserAuthProvider
|
||||
import org.koitharu.kotatsu.parsers.exception.AuthRequiredException
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import org.koitharu.kotatsu.parsers.util.json.JSONIterator
|
||||
import org.koitharu.kotatsu.parsers.util.json.getStringOrNull
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSON
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
/**
 * Parser for mangalib.me. Catalog pages are rendered HTML; chapter lists, tags and
 * page images are extracted from inline `window.__DATA__` / `window.__info` scripts.
 * Supports authorization via the site's session cookies.
 */
internal open class MangaLibParser(override val context: MangaLoaderContext) : MangaParser(), MangaParserAuthProvider {

	override val defaultDomain = "mangalib.me"

	override val source = MangaSource.MANGALIB

	override val authUrl: String
		get() = "https://${getDomain()}/login"

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.RATING,
		SortOrder.ALPHABETICAL,
		SortOrder.POPULARITY,
		SortOrder.UPDATED,
		SortOrder.NEWEST,
	)

	override suspend fun getList(
		offset: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder?,
	): List<Manga> {
		if (!query.isNullOrEmpty()) {
			// Search endpoint has no pagination — only the first page is meaningful
			return if (offset == 0) search(query) else emptyList()
		}
		// 60 items per catalog page
		val page = (offset / 60f).toIntUp()
		val url = buildString {
			append("https://")
			append(getDomain())
			append("/manga-list?dir=")
			append(getSortKey(sortOrder))
			append("&page=")
			append(page)
			tags?.forEach { tag ->
				append("&genres[include][]=")
				append(tag.key)
			}
		}
		val doc = context.httpGet(url).parseHtml()
		val root = doc.body().getElementById("manga-list") ?: throw ParseException("Root not found")
		val items = root.selectFirst("div.media-cards-grid")?.select("div.media-card-wrap")
			?: return emptyList()
		return items.mapNotNull { card ->
			val a = card.selectFirst("a.media-card") ?: return@mapNotNull null
			val href = a.relUrl("href")
			Manga(
				id = generateUid(href),
				title = card.selectFirst("h3")?.text().orEmpty(),
				coverUrl = a.absUrl("data-src"),
				altTitle = null,
				author = null,
				rating = Manga.NO_RATING,
				url = href,
				publicUrl = href.inContextOf(a),
				tags = emptySet(),
				state = null,
				source = source,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val fullUrl = manga.url.withDomain()
		val doc = context.httpGet("$fullUrl?section=info").parseHtml()
		val root = doc.body().getElementById("main-page") ?: throw ParseException("Root not found")
		val title = root.selectFirst("div.media-header__wrap")?.children()
		val info = root.selectFirst("div.media-content")
		val chaptersDoc = context.httpGet("$fullUrl?section=chapters").parseHtml()
		val scripts = chaptersDoc.select("script")
		// Fixed pattern typo: "yyy-MM-dd" → "yyyy-MM-dd" (chapter_created_at is "yyyy-MM-dd HH:mm:ss")
		val dateFormat = SimpleDateFormat("yyyy-MM-dd", Locale.US)
		var chapters: ArrayList<MangaChapter>? = null
		// The chapter list lives in an inline script as `window.__DATA__ = {...};`
		scripts@ for (script in scripts) {
			val raw = script.html().lines()
			for (line in raw) {
				if (line.startsWith("window.__DATA__")) {
					val json = JSONObject(line.substringAfter('=').substringBeforeLast(';'))
					val list = json.getJSONObject("chapters").getJSONArray("list")
					val total = list.length()
					chapters = ArrayList(total)
					for (i in 0 until total) {
						val item = list.getJSONObject(i)
						val chapterId = item.getLong("chapter_id")
						val scanlator = item.getStringOrNull("username")
						val url = buildString {
							append(manga.url)
							append("/v")
							append(item.getInt("chapter_volume"))
							append("/c")
							append(item.getString("chapter_number"))
							append('/')
							append(item.optString("chapter_string"))
						}
						val nameChapter = item.getStringOrNull("chapter_name")
						val volume = item.getInt("chapter_volume")
						val number = item.getString("chapter_number")
						// Russian: "Volume $volume. Chapter $number"
						val fullNameChapter = "Том $volume. Глава $number"
						chapters.add(
							MangaChapter(
								id = generateUid(chapterId),
								url = url,
								source = source,
								// The script lists chapters newest-first; invert to get ascending numbers
								number = total - i,
								uploadDate = dateFormat.tryParse(
									item.getString("chapter_created_at").substringBefore(" "),
								),
								scanlator = scanlator,
								branch = null,
								name = if (nameChapter.isNullOrBlank()) fullNameChapter else "$fullNameChapter - $nameChapter",
							),
						)
					}
					chapters.reverse()
					break@scripts
				}
			}
		}
		return manga.copy(
			title = title?.getOrNull(0)?.text()?.takeUnless(String::isBlank) ?: manga.title,
			altTitle = title?.getOrNull(1)?.text()?.substringBefore('/')?.trim(),
			rating = root.selectFirst("div.media-stats-item__score")
				?.selectFirst("span")
				?.text()?.toFloatOrNull()?.div(5f) ?: manga.rating,
			// "Автор" is Russian for "Author"
			author = info?.getElementsMatchingOwnText("Автор")?.firstOrNull()
				?.nextElementSibling()?.text() ?: manga.author,
			tags = info?.selectFirst("div.media-tags")
				?.select("a.media-tag-item")?.mapToSet { a ->
					MangaTag(
						title = a.text().toTitleCase(),
						key = a.attr("href").substringAfterLast('='),
						source = source,
					)
				} ?: manga.tags,
			isNsfw = isNsfw(doc),
			description = info?.selectFirst("div.media-description__text")?.html(),
			chapters = chapters,
		)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.withDomain()
		val doc = context.httpGet(fullUrl).parseHtml()
		// The site redirects unauthorized users to the registration page
		if (doc.location().endsWith("/register")) {
			throw AuthRequiredException(source)
		}
		val scripts = doc.head().select("script")
		// Page list is inlined as `window.__pg = [...];` inside element #pg
		val pg = (doc.body().getElementById("pg")?.html() ?: parseFailed("Element #pg not found"))
			.substringAfter('=')
			.substringBeforeLast(';')
		val pages = JSONArray(pg)
		for (script in scripts) {
			val raw = script.html().trim()
			if (raw.contains("window.__info")) {
				val json = JSONObject(
					raw.substringAfter("window.__info")
						.substringAfter('=')
						.substringBeforeLast(';'),
				)
				// Prefer the "main" image server; otherwise resolve the server key referenced by img.server
				val domain = json.getJSONObject("servers").run {
					getStringOrNull("main") ?: getString(
						json.getJSONObject("img").getString("server"),
					)
				}
				val url = json.getJSONObject("img").getString("url")
				return pages.mapJSON { x ->
					val pageUrl = "$domain/$url${x.getString("u")}"
					MangaPage(
						id = generateUid(pageUrl),
						url = pageUrl,
						preview = null,
						referer = fullUrl,
						source = source,
					)
				}
			}
		}
		throw ParseException("Script with info not found")
	}

	override suspend fun getTags(): Set<MangaTag> {
		val url = "https://${getDomain()}/manga-list"
		val doc = context.httpGet(url).parseHtml()
		val scripts = doc.body().select("script")
		for (script in scripts) {
			val raw = script.html().trim()
			if (raw.startsWith("window.__DATA")) {
				val json = JSONObject(raw.substringAfter('=').substringBeforeLast(';'))
				val genres = json.getJSONObject("filters").getJSONArray("genres")
				val result = ArraySet<MangaTag>(genres.length())
				for (x in genres.JSONIterator()) {
					result += MangaTag(
						source = source,
						key = x.getInt("id").toString(),
						title = x.getString("name").toTitleCase(),
					)
				}
				return result
			}
		}
		throw ParseException("Script with genres not found")
	}

	// Authorized sessions carry a "remember_web_*" cookie
	override val isAuthorized: Boolean
		get() {
			return context.cookieJar.getCookies(getDomain()).any {
				it.name.startsWith("remember_web_")
			}
		}

	override suspend fun getUsername(): String {
		val body = context.httpGet("https://${getDomain()}/messages").parseHtml().body()
		if (body.baseUri().endsWith("/login")) {
			throw AuthRequiredException(source)
		}
		return body.selectFirst(".profile-user__username")?.text() ?: parseFailed("Cannot find username")
	}

	// Open so domain-specific subclasses can override the NSFW heuristic
	protected open fun isNsfw(doc: Document): Boolean {
		val sidebar = doc.body().selectFirst(".media-sidebar") ?: parseFailed("Sidebar not found")
		return sidebar.getElementsContainingOwnText("18+").isNotEmpty()
	}

	// Maps a sort order to the "dir=...&sort=..." query fragment; UPDATED is the site default
	private fun getSortKey(sortOrder: SortOrder?) = when (sortOrder) {
		SortOrder.RATING -> "desc&sort=rate"
		SortOrder.ALPHABETICAL -> "asc&sort=name"
		SortOrder.POPULARITY -> "desc&sort=views"
		SortOrder.UPDATED -> "desc&sort=last_chapter_at"
		SortOrder.NEWEST -> "desc&sort=created_at"
		else -> "desc&sort=last_chapter_at"
	}

	private suspend fun search(query: String): List<Manga> {
		val domain = getDomain()
		// URL-encode the query (consistent with the other parsers); raw interpolation
		// broke queries containing spaces or reserved characters
		val json = context.httpGet("https://$domain/search?type=manga&q=${query.urlEncoded()}")
			.parseJsonArray()
		return json.mapJSON { jo ->
			val slug = jo.getString("slug")
			val url = "/$slug"
			val covers = jo.getJSONObject("covers")
			Manga(
				id = generateUid(url),
				url = url,
				publicUrl = "https://$domain/$slug",
				title = jo.getString("rus_name"),
				altTitle = jo.getString("name"),
				author = null,
				tags = emptySet(),
				rating = jo.getString("rate_avg")
					.toFloatOrNull()?.div(5f) ?: Manga.NO_RATING,
				state = null,
				source = source,
				coverUrl = covers.getString("thumbnail"),
				largeCoverUrl = covers.getString("default"),
			)
		}
	}
}
|
||||
@ -0,0 +1,170 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
/**
 * Parser for mangaowls.com. Pure HTML scraping; chapter URLs carry a per-manga
 * `tr` token extracted from an inline script plus a base64-encoded `s` parameter.
 */
internal class MangaOwlParser(override val context: MangaLoaderContext) : MangaParser() {

	override val source = MangaSource.MANGAOWL

	override val defaultDomain = "mangaowls.com"

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.POPULARITY,
		SortOrder.NEWEST,
		SortOrder.UPDATED,
	)

	override suspend fun getList(
		offset: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder?,
	): List<Manga> {
		// 36 items per page; pages are 1-based
		val page = (offset / 36f).toIntUp().inc()
		val link = buildString {
			append("https://")
			append(getDomain())
			when {
				!query.isNullOrEmpty() -> {
					append("/search/$page?search=")
					append(query.urlEncoded())
				}
				!tags.isNullOrEmpty() -> {
					// Tag keys are path fragments; genre listing uses a numeric sort type
					for (tag in tags) {
						append(tag.key)
					}
					append("/$page?type=${getAlternativeSortKey(sortOrder)}")
				}
				else -> {
					append("/${getSortKey(sortOrder)}/$page")
				}
			}
		}
		val doc = context.httpGet(link).parseHtml()
		val slides = doc.body().select("ul.slides") ?: parseFailed("An error occurred while parsing")
		val items = slides.select("div.col-md-2")
		return items.mapNotNull { item ->
			val href = item.selectFirst("h6 a")?.relUrl("href") ?: return@mapNotNull null
			Manga(
				id = generateUid(href),
				title = item.selectFirst("h6 a")?.text() ?: return@mapNotNull null,
				coverUrl = item.select("div.img-responsive").attr("abs:data-background-image"),
				altTitle = null,
				author = null,
				// Site scores are 0..10 — normalize to 0..1
				rating = runCatching {
					item.selectFirst("div.block-stars")
						?.text()
						?.toFloatOrNull()
						?.div(10f)
				}.getOrNull() ?: Manga.NO_RATING,
				url = href,
				publicUrl = href.withDomain(),
				source = source,
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val doc = context.httpGet(manga.publicUrl).parseHtml()
		val info = doc.body().selectFirst("div.single_detail") ?: parseFailed("An error occurred while parsing")
		val table = doc.body().selectFirst("div.single-grid-right") ?: parseFailed("An error occurred while parsing")
		val dateFormat = SimpleDateFormat("MM/dd/yyyy", Locale.US)
		// The reader token is inlined as: window['tr'] = '...';
		val trRegex = "window\\['tr'] = '([^']*)';".toRegex(RegexOption.IGNORE_CASE)
		val trElement =
			doc.getElementsByTag("script").find { trRegex.find(it.data()) != null } ?: parseFailed("Oops, tr not found")
		// !! is safe: `find` above already matched this exact element's data
		val tr = trRegex.find(trElement.data())!!.groups[1]!!.value
		val s = context.encodeBase64(defaultDomain.toByteArray())
		return manga.copy(
			description = info.selectFirst(".description")?.html(),
			largeCoverUrl = info.select("img").first()?.let { img ->
				if (img.hasAttr("data-src")) img.attr("abs:data-src") else img.attr("abs:src")
			},
			author = info.selectFirst("p.fexi_header_para a.author_link")?.text(),
			state = parseStatus(info.select("p.fexi_header_para:contains(status)").first()?.ownText()),
			tags = manga.tags + info.select("div.col-xs-12.col-md-8.single-right-grid-right > p > a[href*=genres]")
				.mapNotNull {
					val a = it.selectFirst("a") ?: return@mapNotNull null
					MangaTag(
						title = a.text().toTitleCase(),
						key = a.attr("href"),
						source = source,
					)
				},
			chapters = table.select("div.table.table-chapter-list").select("li.list-group-item.chapter_list")
				.asReversed().mapIndexed { i, li ->
					val a = li.select("a")
					val href = a.attr("data-href").ifEmpty {
						parseFailed("Link is missing")
					}
					MangaChapter(
						id = generateUid(href),
						name = a.select("label").text(),
						number = i + 1,
						// Chapters require the tr token and encoded domain to load
						url = "$href?tr=$tr&s=$s",
						scanlator = null,
						branch = null,
						uploadDate = dateFormat.tryParse(li.selectFirst("small:last-of-type")?.text()),
						source = source,
					)
				},
		)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.withDomain()
		val doc = context.httpGet(fullUrl).parseHtml()
		val root = doc.body().select("div.item img.owl-lazy") ?: throw ParseException("Root not found")
		return root.map { div ->
			val url = div?.relUrl("data-src") ?: parseFailed("Page image not found")
			MangaPage(
				id = generateUid(url),
				url = url,
				preview = null,
				referer = url,
				source = source,
			)
		}
	}

	// Maps the human-readable status text to a MangaState, null when unknown
	private fun parseStatus(status: String?) = when {
		status == null -> null
		status.contains("Ongoing") -> MangaState.ONGOING
		status.contains("Completed") -> MangaState.FINISHED
		else -> null
	}

	override suspend fun getTags(): Set<MangaTag> {
		val doc = context.httpGet("https://${getDomain()}/").parseHtml()
		val root = doc.body().select("ul.dropdown-menu.multi-column.columns-3").select("li")
		return root.mapToSet { p ->
			val a = p.selectFirst("a") ?: parseFailed("a is null")
			MangaTag(
				title = a.text().toTitleCase(),
				key = a.attr("href"),
				source = source,
			)
		}
	}

	// Path segment for plain catalog listings
	private fun getSortKey(sortOrder: SortOrder?) =
		when (sortOrder ?: sortOrders.minByOrNull { it.ordinal }) {
			SortOrder.POPULARITY -> "popular"
			SortOrder.NEWEST -> "new_release"
			SortOrder.UPDATED -> "lastest"
			else -> "lastest"
		}

	// Numeric "type" parameter used on genre listings
	private fun getAlternativeSortKey(sortOrder: SortOrder?) =
		when (sortOrder ?: sortOrders.minByOrNull { it.ordinal }) {
			SortOrder.POPULARITY -> "0"
			SortOrder.NEWEST -> "2"
			SortOrder.UPDATED -> "3"
			else -> "3"
		}
}
|
||||
@ -0,0 +1,215 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
/**
 * Parser for mangatown.com. HTML scraping; falls back to the mobile site
 * ([bypassLicensedChapters]) when the desktop chapter list is hidden for
 * licensed titles.
 */
internal class MangaTownParser(override val context: MangaLoaderContext) : MangaParser() {

	override val source = MangaSource.MANGATOWN

	override val defaultDomain = "www.mangatown.com"

	override val sortOrders: Set<SortOrder> = EnumSet.of(
		SortOrder.ALPHABETICAL,
		SortOrder.RATING,
		SortOrder.POPULARITY,
		SortOrder.UPDATED,
	)

	// Matches a tag key: six non-empty dash-separated segments (directory URL format)
	private val regexTag = Regex("[^\\-]+-[^\\-]+-[^\\-]+-[^\\-]+-[^\\-]+-[^\\-]+")

	override suspend fun getList(
		offset: Int,
		query: String?,
		tags: Set<MangaTag>?,
		sortOrder: SortOrder?,
	): List<Manga> {
		val sortKey = when (sortOrder) {
			SortOrder.ALPHABETICAL -> "?name.az"
			SortOrder.RATING -> "?rating.za"
			SortOrder.UPDATED -> "?last_chapter_time.za"
			else -> ""
		}
		// 30 items per page; pages are 1-based
		val page = (offset / 30) + 1
		val url = when {
			!query.isNullOrEmpty() -> {
				// Search has no pagination
				if (offset != 0) {
					return emptyList()
				}
				"/search?name=${query.urlEncoded()}".withDomain()
			}
			tags.isNullOrEmpty() -> "/directory/$page.htm$sortKey".withDomain()
			tags.size == 1 -> "/directory/${tags.first().key}/$page.htm$sortKey".withDomain()
			else -> tags.joinToString(
				prefix = "/search?page=$page".withDomain(),
			) { tag ->
				"&genres[${tag.key}]=1"
			}
		}
		val doc = context.httpGet(url).parseHtml()
		val root = doc.body().selectFirst("ul.manga_pic_list")
			?: throw ParseException("Root not found")
		return root.select("li").mapNotNull { li ->
			val a = li.selectFirst("a.manga_cover")
			val href = a?.relUrl("href")
				?: return@mapNotNull null
			val views = li.select("p.view")
			val status = views.findOwnText { x -> x.startsWith("Status:") }
				?.substringAfter(':')?.trim()?.lowercase(Locale.ROOT)
			Manga(
				id = generateUid(href),
				title = a.attr("title"),
				coverUrl = a.selectFirst("img")?.absUrl("src").orEmpty(),
				source = source,
				altTitle = null,
				rating = li.selectFirst("p.score")?.selectFirst("b")
					?.ownText()?.toFloatOrNull()?.div(5f) ?: Manga.NO_RATING,
				author = views.findText { x -> x.startsWith("Author:") }?.substringAfter(':')
					?.trim(),
				state = when (status) {
					"ongoing" -> MangaState.ONGOING
					"completed" -> MangaState.FINISHED
					else -> null
				},
				tags = li.selectFirst("p.keyWord")?.select("a")?.mapNotNullToSet tags@{ x ->
					MangaTag(
						title = x.attr("title").toTitleCase(),
						key = x.attr("href").parseTagKey() ?: return@tags null,
						source = source,
					)
				}.orEmpty(),
				url = href,
				publicUrl = href.inContextOf(a),
			)
		}
	}

	override suspend fun getDetails(manga: Manga): Manga {
		val doc = context.httpGet(manga.url.withDomain()).parseHtml()
		val root = doc.body().selectFirst("section.main")
			?.selectFirst("div.article_content") ?: throw ParseException("Cannot find root")
		val info = root.selectFirst("div.detail_info")?.selectFirst("ul")
		val chaptersList = root.selectFirst("div.chapter_content")
			?.selectFirst("ul.chapter_list")?.select("li")?.asReversed()
		val dateFormat = SimpleDateFormat("MMM dd,yyyy", Locale.US)
		return manga.copy(
			tags = manga.tags + info?.select("li")?.find { x ->
				x.selectFirst("b")?.ownText() == "Genre(s):"
			}?.select("a")?.mapNotNull { a ->
				MangaTag(
					title = a.attr("title").toTitleCase(),
					key = a.attr("href").parseTagKey() ?: return@mapNotNull null,
					source = source,
				)
			}.orEmpty(),
			description = info?.getElementById("show")?.ownText(),
			chapters = chaptersList?.mapIndexedNotNull { i, li ->
				val href = li.selectFirst("a")?.relUrl("href")
					?: return@mapIndexedNotNull null
				// Chapter title = the unstyled <span>s joined together
				val name = li.select("span").filter { it.className().isEmpty() }
					.joinToString(" - ") { it.text() }.trim()
				MangaChapter(
					id = generateUid(href),
					url = href,
					source = source,
					number = i + 1,
					uploadDate = parseChapterDate(
						dateFormat,
						li.selectFirst("span.time")?.text(),
					),
					name = name.ifEmpty { "${manga.title} - ${i + 1}" },
					scanlator = null,
					branch = null,
				)
			} ?: bypassLicensedChapters(manga),
		)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
		val fullUrl = chapter.url.withDomain()
		val doc = context.httpGet(fullUrl).parseHtml()
		val root = doc.body().selectFirst("div.page_select")
			?: throw ParseException("Cannot find root")
		return root.selectFirst("select")?.select("option")?.mapNotNull {
			val href = it.relUrl("value")
			// The trailing "featured.html" option is an ad, not a page
			if (href.endsWith("featured.html")) {
				return@mapNotNull null
			}
			MangaPage(
				id = generateUid(href),
				url = href,
				preview = null,
				referer = fullUrl,
				source = source,
			)
		} ?: parseFailed("Pages list not found")
	}

	override suspend fun getPageUrl(page: MangaPage): String {
		val doc = context.httpGet(page.url.withDomain()).parseHtml()
		return doc.getElementById("image")?.absUrl("src") ?: parseFailed("Image not found")
	}

	override suspend fun getTags(): Set<MangaTag> {
		val doc = context.httpGet("/directory/".withDomain()).parseHtml()
		val root = doc.body().selectFirst("aside.right")
			?.getElementsContainingOwnText("Genres")
			?.first()
			?.nextElementSibling() ?: parseFailed("Root not found")
		return root.select("li").mapNotNullToSet { li ->
			val a = li.selectFirst("a") ?: return@mapNotNullToSet null
			val key = a.attr("href").parseTagKey()
			if (key.isNullOrEmpty()) {
				return@mapNotNullToSet null
			}
			MangaTag(
				source = source,
				key = key,
				title = a.text().toTitleCase(),
			)
		}
	}

	// Handles the site's relative "Today"/"Yesterday" labels before falling back to the pattern
	private fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
		return when {
			date.isNullOrEmpty() -> 0L
			date.contains("Today") -> Calendar.getInstance().timeInMillis
			date.contains("Yesterday") -> Calendar.getInstance().apply { add(Calendar.DAY_OF_MONTH, -1) }.timeInMillis
			else -> dateFormat.tryParse(date)
		}
	}

	// Licensed titles hide chapters on the desktop site — read them from the mobile ("m") subdomain
	private suspend fun bypassLicensedChapters(manga: Manga): List<MangaChapter> {
		val doc = context.httpGet(manga.url.withDomain("m")).parseHtml()
		val list = doc.body().selectFirst("ul.detail-ch-list") ?: return emptyList()
		val dateFormat = SimpleDateFormat("MMM dd,yyyy", Locale.US)
		return list.select("li").asReversed().mapIndexedNotNull { i, li ->
			val a = li.selectFirst("a") ?: return@mapIndexedNotNull null
			val href = a.relUrl("href")
			val name = a.selectFirst("span.vol")?.text().orEmpty().ifEmpty {
				a.ownText()
			}
			MangaChapter(
				id = generateUid(href),
				url = href,
				source = source,
				number = i + 1,
				uploadDate = parseChapterDate(
					dateFormat,
					li.selectFirst("span.time")?.text(),
				),
				name = name.ifEmpty { "${manga.title} - ${i + 1}" },
				scanlator = null,
				branch = null,
			)
		}
	}

	// Extracts the tag key segment from an URL path, e.g. "/directory/0-0-0-0-0-0/"
	private fun String.parseTagKey() = split('/').findLast { regexTag matches it }
}
|
||||
@ -0,0 +1,263 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
// Items per page returned by the admin-ajax listing endpoint; used to convert offsets to page numbers
private const val PAGE_SIZE = 12
|
||||
|
||||
internal class MangareadParser(override val context: MangaLoaderContext) : MangaParser() {
|
||||
|
||||
override val source = MangaSource.MANGAREAD
|
||||
|
||||
override val defaultDomain = "www.mangaread.org"
|
||||
|
||||
override val sortOrders: Set<SortOrder> = EnumSet.of(
|
||||
SortOrder.UPDATED,
|
||||
SortOrder.POPULARITY,
|
||||
)
|
||||
|
||||
override suspend fun getList(
|
||||
offset: Int,
|
||||
query: String?,
|
||||
tags: Set<MangaTag>?,
|
||||
sortOrder: SortOrder?,
|
||||
): List<Manga> {
|
||||
val tag = when {
|
||||
tags.isNullOrEmpty() -> null
|
||||
tags.size == 1 -> tags.first()
|
||||
else -> throw NotImplementedError("Multiple genres are not supported by this source")
|
||||
}
|
||||
val payload = createRequestTemplate()
|
||||
payload["page"] = (offset / PAGE_SIZE.toFloat()).toIntUp().toString()
|
||||
payload["vars[meta_key]"] = when (sortOrder) {
|
||||
SortOrder.POPULARITY -> "_wp_manga_views"
|
||||
SortOrder.UPDATED -> "_latest_update"
|
||||
else -> "_wp_manga_views"
|
||||
}
|
||||
payload["vars[wp-manga-genre]"] = tag?.key.orEmpty()
|
||||
payload["vars[s]"] = query.orEmpty()
|
||||
val doc = context.httpPost(
|
||||
"https://${getDomain()}/wp-admin/admin-ajax.php",
|
||||
payload,
|
||||
).parseHtml()
|
||||
return doc.select("div.row.c-tabs-item__content").map { div ->
|
||||
val href = div.selectFirst("a")?.relUrl("href")
|
||||
?: parseFailed("Link not found")
|
||||
val summary = div.selectFirst(".tab-summary")
|
||||
Manga(
|
||||
id = generateUid(href),
|
||||
url = href,
|
||||
publicUrl = href.inContextOf(div),
|
||||
coverUrl = div.selectFirst("img")?.absUrl("data-src").orEmpty(),
|
||||
title = summary?.selectFirst("h3")?.text().orEmpty(),
|
||||
rating = div.selectFirst("span.total_votes")?.ownText()
|
||||
?.toFloatOrNull()?.div(5f) ?: -1f,
|
||||
tags = summary?.selectFirst(".mg_genres")?.select("a")?.mapToSet { a ->
|
||||
MangaTag(
|
||||
key = a.attr("href").removeSuffix("/").substringAfterLast('/'),
|
||||
title = a.text().toTitleCase(),
|
||||
source = MangaSource.MANGAREAD,
|
||||
)
|
||||
}.orEmpty(),
|
||||
author = summary?.selectFirst(".mg_author")?.selectFirst("a")?.ownText(),
|
||||
state = when (
|
||||
summary?.selectFirst(".mg_status")?.selectFirst(".summary-content")
|
||||
?.ownText()?.trim()
|
||||
) {
|
||||
"OnGoing" -> MangaState.ONGOING
|
||||
"Completed" -> MangaState.FINISHED
|
||||
else -> null
|
||||
},
|
||||
source = MangaSource.MANGAREAD,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
override suspend fun getTags(): Set<MangaTag> {
|
||||
val doc = context.httpGet("https://${getDomain()}/manga/").parseHtml()
|
||||
val root = doc.body().selectFirst("header")
|
||||
?.selectFirst("ul.second-menu") ?: parseFailed("Root not found")
|
||||
return root.select("li").mapNotNullToSet { li ->
|
||||
val a = li.selectFirst("a") ?: return@mapNotNullToSet null
|
||||
val href = a.attr("href").removeSuffix("/")
|
||||
.substringAfterLast("genres/", "")
|
||||
if (href.isEmpty()) {
|
||||
return@mapNotNullToSet null
|
||||
}
|
||||
MangaTag(
|
||||
key = href,
|
||||
title = a.text().toTitleCase(),
|
||||
source = MangaSource.MANGAREAD,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
override suspend fun getDetails(manga: Manga): Manga {
|
||||
val fullUrl = manga.url.withDomain()
|
||||
val doc = context.httpGet(fullUrl).parseHtml()
|
||||
val root = doc.body().selectFirst("div.profile-manga")
|
||||
?.selectFirst("div.summary_content")
|
||||
?.selectFirst("div.post-content")
|
||||
?: throw ParseException("Root not found")
|
||||
val root2 = doc.body().selectFirst("div.content-area")
|
||||
?.selectFirst("div.c-page")
|
||||
?: throw ParseException("Root2 not found")
|
||||
val dateFormat = SimpleDateFormat("MMMM dd, yyyy", Locale.US)
|
||||
return manga.copy(
|
||||
tags = root.selectFirst("div.genres-content")?.select("a")
|
||||
?.mapNotNullToSet { a ->
|
||||
MangaTag(
|
||||
key = a.attr("href").removeSuffix("/").substringAfterLast('/'),
|
||||
title = a.text().toTitleCase(),
|
||||
source = MangaSource.MANGAREAD,
|
||||
)
|
||||
} ?: manga.tags,
|
||||
description = root2.selectFirst("div.description-summary")
|
||||
?.selectFirst("div.summary__content")
|
||||
?.select("p")
|
||||
?.filterNot { it.ownText().startsWith("A brief description") }
|
||||
?.joinToString { it.html() },
|
||||
chapters = root2.select("li").asReversed().mapIndexed { i, li ->
|
||||
val a = li.selectFirst("a")
|
||||
val href = a?.relUrl("href").orEmpty().ifEmpty {
|
||||
parseFailed("Link is missing")
|
||||
}
|
||||
MangaChapter(
|
||||
id = generateUid(href),
|
||||
name = a!!.ownText(),
|
||||
number = i + 1,
|
||||
url = href,
|
||||
uploadDate = parseChapterDate(
|
||||
dateFormat,
|
||||
li.selectFirst("span.chapter-release-date i")?.text(),
|
||||
),
|
||||
source = MangaSource.MANGAREAD,
|
||||
scanlator = null,
|
||||
branch = null,
|
||||
)
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
|
||||
val fullUrl = chapter.url.withDomain()
|
||||
val doc = context.httpGet(fullUrl).parseHtml()
|
||||
val root = doc.body().selectFirst("div.main-col-inner")
|
||||
?.selectFirst("div.reading-content")
|
||||
?: throw ParseException("Root not found")
|
||||
return root.select("div.page-break").map { div ->
|
||||
val img = div.selectFirst("img") ?: parseFailed("Page image not found")
|
||||
val url = img.relUrl("data-src").ifEmpty {
|
||||
img.relUrl("src")
|
||||
}
|
||||
MangaPage(
|
||||
id = generateUid(url),
|
||||
url = url,
|
||||
preview = null,
|
||||
referer = fullUrl,
|
||||
source = MangaSource.MANGAREAD,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Converts a human-readable chapter date into epoch milliseconds.
 *
 * Handles, in order: relative dates ("2 days ago" plus the Portuguese and
 * Turkish translations of "ago"), "yesterday"/"today" (mapped to the
 * respective local midnight), ordinal dates ("5th December 2019" — the
 * ordinal suffix is stripped before parsing), and finally a plain
 * [dateFormat] parse. Returns 0 for a null [date].
 */
private fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
    date ?: return 0
    return when {
        date.endsWith(" ago", ignoreCase = true) -> {
            parseRelativeDate(date)
        }
        // Handle translated 'ago' in Portuguese.
        date.endsWith(" atrás", ignoreCase = true) -> {
            parseRelativeDate(date)
        }
        // Handle translated 'ago' in Turkish.
        date.endsWith(" önce", ignoreCase = true) -> {
            parseRelativeDate(date)
        }
        // Handle 'yesterday' and 'today', using midnight.
        // FIX: the original checked startsWith("year"), which can never match
        // "yesterday" ("yest…" != "year…"), making this branch dead code.
        date.startsWith("yesterday", ignoreCase = true) -> {
            Calendar.getInstance().apply {
                add(Calendar.DAY_OF_MONTH, -1) // yesterday
                set(Calendar.HOUR_OF_DAY, 0)
                set(Calendar.MINUTE, 0)
                set(Calendar.SECOND, 0)
                set(Calendar.MILLISECOND, 0)
            }.timeInMillis
        }
        date.startsWith("today", ignoreCase = true) -> {
            Calendar.getInstance().apply {
                set(Calendar.HOUR_OF_DAY, 0)
                set(Calendar.MINUTE, 0)
                set(Calendar.SECOND, 0)
                set(Calendar.MILLISECOND, 0)
            }.timeInMillis
        }
        date.contains(Regex("""\d(st|nd|rd|th)""")) -> {
            // Clean date (e.g. 5th December 2019 to 5 December 2019) before parsing it
            date.split(" ").map {
                if (it.contains(Regex("""\d\D\D"""))) {
                    it.replace(Regex("""\D"""), "")
                } else {
                    it
                }
            }
                .let { dateFormat.tryParse(it.joinToString(" ")) }
        }
        else -> dateFormat.tryParse(date)
    }
}
|
||||
|
||||
// Parses dates in this form:
|
||||
// 21 hours ago
|
||||
// Parses relative dates such as "21 hours ago" (multi-language unit words).
// Returns 0 when no number is present or no known unit word matches;
// otherwise subtracts the amount from the current time.
private fun parseRelativeDate(date: String): Long {
    val amount = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
    // Map the localized unit word onto a Calendar field, then subtract once.
    val field = when {
        WordSet(
            "hari",
            "gün",
            "jour",
            "día",
            "dia",
            "day",
        ).anyWordIn(date) -> Calendar.DAY_OF_MONTH
        WordSet("jam", "saat", "heure", "hora", "hour").anyWordIn(date) -> Calendar.HOUR
        WordSet("menit", "dakika", "min", "minute", "minuto").anyWordIn(date) -> Calendar.MINUTE
        WordSet("detik", "segundo", "second").anyWordIn(date) -> Calendar.SECOND
        WordSet("month").anyWordIn(date) -> Calendar.MONTH
        WordSet("year").anyWordIn(date) -> Calendar.YEAR
        else -> return 0
    }
    return Calendar.getInstance().apply { add(field, -amount) }.timeInMillis
}
|
||||
|
||||
// Builds the mutable key/value template for the Madara "load more" POST body.
// The query string is split on '&' and each "key=value" pair is mapped on the
// first '=' (values are URL-encoded and contain no raw '=').
private fun createRequestTemplate() =
    "action=madara_load_more&page=1&template=madara-core%2Fcontent%2Fcontent-search&vars%5Bs%5D=&vars%5Borderby%5D=meta_value_num&vars%5Bpaged%5D=1&vars%5Btemplate%5D=search&vars%5Bmeta_query%5D%5B0%5D%5Brelation%5D=AND&vars%5Bmeta_query%5D%5Brelation%5D=OR&vars%5Bpost_type%5D=wp-manga&vars%5Bpost_status%5D=publish&vars%5Bmeta_key%5D=_latest_update&vars%5Border%5D=desc&vars%5Bmanga_archives_item_layout%5D=default"
        .split('&')
        .map { param ->
            param.substringBefore('=') to param.substringAfter('=')
        }.toMutableMap()
|
||||
}
|
||||
@ -0,0 +1,10 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
|
||||
/**
 * MintManga source. The site runs the Grouple engine, so all parsing logic
 * lives in [GroupleParser]; this class only binds the source id and domain.
 */
internal class MintMangaParser(override val context: MangaLoaderContext) : GroupleParser() {

    override val defaultDomain = "mintmanga.live"
    override val source: MangaSource = MangaSource.MINTMANGA
}
|
||||
@ -0,0 +1,251 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import okhttp3.Headers
|
||||
import okhttp3.HttpUrl.Companion.toHttpUrl
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
private const val PAGE_SIZE = 26
|
||||
|
||||
internal abstract class NineMangaParser(
|
||||
final override val context: MangaLoaderContext,
|
||||
final override val source: MangaSource,
|
||||
final override val defaultDomain: String,
|
||||
) : MangaParser() {
|
||||
|
||||
init {
|
||||
context.cookieJar.insertCookies(getDomain(), "ninemanga_template_desk=yes")
|
||||
}
|
||||
|
||||
private val headers = Headers.Builder()
|
||||
.add("Accept-Language", "en-US;q=0.7,en;q=0.3")
|
||||
.build()
|
||||
|
||||
override val sortOrders: Set<SortOrder> = Collections.singleton(
|
||||
SortOrder.POPULARITY,
|
||||
)
|
||||
|
||||
/**
 * Returns one catalog page of manga.
 *
 * Exactly one listing URL is built: free-text search (query wins over tags),
 * category search (comma-separated tag keys), or the plain catalog. The page
 * index is 1-based and derived from [offset] in PAGE_SIZE steps.
 */
override suspend fun getList(
    offset: Int,
    query: String?,
    tags: Set<MangaTag>?,
    sortOrder: SortOrder?,
): List<Manga> {
    // Convert the item offset into a 1-based page index, rounding up.
    val page = (offset / PAGE_SIZE.toFloat()).toIntUp() + 1
    val url = buildString {
        append("https://")
        append(getDomain())
        when {
            !query.isNullOrEmpty() -> {
                append("/search/?name_sel=&wd=")
                append(query.urlEncoded())
                append("&page=")
            }
            !tags.isNullOrEmpty() -> {
                append("/search/?category_id=")
                // NOTE(review): a trailing comma is left after the last id —
                // presumably tolerated by the site; confirm before changing.
                for (tag in tags) {
                    append(tag.key)
                    append(',')
                }
                append("&page=")
            }
            else -> {
                append("/category/index_")
            }
        }
        append(page)
        append(".html")
    }
    val doc = context.httpGet(url, headers).parseHtml()
    val root = doc.body().selectFirst("ul.direlist")
        ?: throw ParseException("Cannot find root")
    // Host of the served document, used to relativize the absolute links below.
    val baseHost = root.baseUri().toHttpUrl().host
    return root.select("li").map { node ->
        val href = node.selectFirst("a")?.absUrl("href")
            ?: parseFailed("Link not found")
        val relUrl = href.toRelativeUrl(baseHost)
        val dd = node.selectFirst("dd")
        Manga(
            id = generateUid(relUrl),
            url = relUrl,
            publicUrl = href,
            title = dd?.selectFirst("a.bookname")?.text()?.toCamelCase().orEmpty(),
            altTitle = null,
            coverUrl = node.selectFirst("img")?.absUrl("src").orEmpty(),
            rating = Manga.NO_RATING,
            author = null,
            tags = emptySet(),
            state = null,
            source = source,
            description = dd?.selectFirst("p")?.html(),
        )
    }
}
|
||||
|
||||
/**
 * Loads the title page and fills tags, author, state, description and the
 * chapter list onto a copy of [manga]. Chapters are re-numbered ascending
 * (oldest = 1) by reversing the site's newest-first list.
 */
override suspend fun getDetails(manga: Manga): Manga {
    // "waring" is the site's own (misspelled) query parameter — presumably it
    // bypasses the mature-content warning page; do not "correct" the spelling.
    val doc = context.httpGet(
        manga.url.withDomain() + "?waring=1",
        headers,
    ).parseHtml()
    val root = doc.body().selectFirst("div.manga")
        ?: throw ParseException("Cannot find root")
    val infoRoot = root.selectFirst("div.bookintro")
        ?: throw ParseException("Cannot find info")
    return manga.copy(
        tags = infoRoot.getElementsByAttributeValue("itemprop", "genre").first()
            ?.select("a")?.mapToSet { a ->
                MangaTag(
                    title = a.text().toTitleCase(),
                    // Tag key is the path segment between "/" and "." of the link.
                    key = a.attr("href").substringBetween("/", "."),
                    source = source,
                )
            }.orEmpty(),
        author = infoRoot.getElementsByAttributeValue("itemprop", "author").first()?.text(),
        state = parseStatus(infoRoot.select("li a.red").text()),
        // The description markup starts with a bold label; keep only the text after it.
        description = infoRoot.getElementsByAttributeValue("itemprop", "description").first()
            ?.html()?.substringAfter("</b>"),
        chapters = root.selectFirst("div.chapterbox")?.select("ul.sub_vol_ul > li")
            ?.asReversed()?.mapIndexed { i, li ->
                val a = li.selectFirst("a.chapter_list_a")
                val href = a?.relUrl("href")?.replace("%20", " ") ?: parseFailed("Link not found")
                MangaChapter(
                    id = generateUid(href),
                    name = a.text(),
                    number = i + 1,
                    url = href,
                    uploadDate = parseChapterDateByLang(li.selectFirst("span")?.text().orEmpty()),
                    source = source,
                    scanlator = null,
                    branch = null,
                )
            },
    )
}
|
||||
|
||||
/**
 * Fetches the chapter reader and builds the page list from the values of the
 * page-selector dropdown ("#page option").
 */
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
    val chapterUrl = chapter.url.withDomain()
    val document = context.httpGet(chapterUrl, headers).parseHtml()
    val options = document.body().getElementById("page")?.select("option")
        ?: throw ParseException("Pages list not found at ${chapter.url}")
    return options.map { option ->
        val pageUrl = option.attr("value")
        MangaPage(
            id = generateUid(pageUrl),
            url = pageUrl,
            referer = chapterUrl,
            preview = null,
            source = source,
        )
    }
}
|
||||
|
||||
/**
 * Resolves a single page to its image URL via the "pic_download" anchor on
 * the page document.
 */
override suspend fun getPageUrl(page: MangaPage): String {
    val body = context.httpGet(page.url.withDomain(), headers).parseHtml().body()
    return body.selectFirst("a.pic_download")?.absUrl("href")
        ?: throw ParseException("Page image not found")
}
|
||||
|
||||
/**
 * Loads the advanced-search form and reads genre tags from its category
 * entries ("li.cate_list"), keyed by the "cate_id" attribute.
 */
override suspend fun getTags(): Set<MangaTag> {
    val doc = context.httpGet("https://${getDomain()}/search/?type=high", headers)
        .parseHtml()
    val root = doc.body().getElementById("search_form")
    return root?.select("li.cate_list")?.mapNotNullToSet { li ->
        // FIX: Jsoup's attr() returns an empty string (never null) for a
        // missing attribute, so the original `attr(...) ?: return null` was
        // dead code; skip entries without a usable id explicitly.
        val cateId = li.attr("cate_id").takeIf { it.isNotEmpty() }
            ?: return@mapNotNullToSet null
        val a = li.selectFirst("a") ?: return@mapNotNullToSet null
        MangaTag(
            title = a.text().toTitleCase(),
            key = cateId,
            source = source,
        )
    } ?: parseFailed("Root not found")
}
|
||||
|
||||
// Maps the site's textual status to a MangaState; unknown text yields null.
private fun parseStatus(status: String) = when {
    "Ongoing" in status -> MangaState.ONGOING
    "Completed" in status -> MangaState.FINISHED
    else -> null
}
|
||||
|
||||
/**
 * Parses a chapter date in one of the site's per-language formats.
 *
 * Three-word dates are either absolute ("Apr 2, 2021" — detected by the
 * comma) or relative ("5 hours ago" in various languages). Unknown relative
 * units leave the calendar untouched, i.e. they resolve to "now", matching
 * the previous behavior. Anything else yields 0.
 */
private fun parseChapterDateByLang(date: String): Long {
    val dateWords = date.split(" ")

    if (dateWords.size == 3) {
        if (dateWords[1].contains(",")) {
            // FIX: the parsed value was previously computed and discarded
            // (missing `return`), so absolute dates always resolved to 0.
            return SimpleDateFormat("MMM d, yyyy", Locale.ENGLISH).tryParse(date)
        }
        // FIX: Integer.parseInt could throw on a non-numeric first word;
        // fall back to 0 (unknown date) instead of crashing the parser.
        val timeAgo = dateWords[0].toIntOrNull() ?: return 0L
        return Calendar.getInstance().apply {
            when (dateWords[1]) {
                "minutes" -> Calendar.MINUTE // EN-FR
                "hours" -> Calendar.HOUR // EN

                "minutos" -> Calendar.MINUTE // ES
                "horas" -> Calendar.HOUR

                // "minutos" -> Calendar.MINUTE // BR
                "hora" -> Calendar.HOUR

                "минут" -> Calendar.MINUTE // RU
                "часа" -> Calendar.HOUR

                "Stunden" -> Calendar.HOUR // DE

                "minuti" -> Calendar.MINUTE // IT
                "ore" -> Calendar.HOUR

                "heures" -> Calendar.HOUR // FR ("minutes" also French word)
                else -> null
            }?.let {
                add(it, -timeAgo)
            }
        }.timeInMillis
    }
    return 0L
}
|
||||
|
||||
// Concrete per-language entry points. Each class only binds a MangaSource id
// to the matching ninemanga.com language subdomain; all parsing behavior is
// inherited from NineMangaParser.

class English(context: MangaLoaderContext) : NineMangaParser(
    context,
    MangaSource.NINEMANGA_EN,
    "www.ninemanga.com",
)

class Spanish(context: MangaLoaderContext) : NineMangaParser(
    context,
    MangaSource.NINEMANGA_ES,
    "es.ninemanga.com",
)

class Russian(context: MangaLoaderContext) : NineMangaParser(
    context,
    MangaSource.NINEMANGA_RU,
    "ru.ninemanga.com",
)

class Deutsch(context: MangaLoaderContext) : NineMangaParser(
    context,
    MangaSource.NINEMANGA_DE,
    "de.ninemanga.com",
)

class Brazil(context: MangaLoaderContext) : NineMangaParser(
    context,
    MangaSource.NINEMANGA_BR,
    "br.ninemanga.com",
)

class Italiano(context: MangaLoaderContext) : NineMangaParser(
    context,
    MangaSource.NINEMANGA_IT,
    "it.ninemanga.com",
)

class Francais(context: MangaLoaderContext) : NineMangaParser(
    context,
    MangaSource.NINEMANGA_FR,
    "fr.ninemanga.com",
)
|
||||
}
|
||||
@ -0,0 +1,225 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import androidx.collection.SparseArrayCompat
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.MangaParserAuthProvider
|
||||
import org.koitharu.kotatsu.parsers.exception.AuthRequiredException
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
import java.util.regex.Pattern
|
||||
|
||||
internal class NudeMoonParser(override val context: MangaLoaderContext) : MangaParser(), MangaParserAuthProvider {
|
||||
|
||||
override val source = MangaSource.NUDEMOON
|
||||
override val defaultDomain = "nude-moon.net"
|
||||
override val authUrl: String
|
||||
get() = "https://${getDomain()}/index.php"
|
||||
|
||||
override val isAuthorized: Boolean
|
||||
get() {
|
||||
return context.cookieJar.getCookies(getDomain()).any {
|
||||
it.name == "fusion_user"
|
||||
}
|
||||
}
|
||||
|
||||
override val sortOrders: Set<SortOrder> = EnumSet.of(
|
||||
SortOrder.NEWEST,
|
||||
SortOrder.POPULARITY,
|
||||
SortOrder.RATING,
|
||||
)
|
||||
|
||||
private val pageUrlPatter = Pattern.compile(".*\\?page=[0-9]+$")
|
||||
|
||||
init {
|
||||
context.cookieJar.insertCookies(
|
||||
getDomain(),
|
||||
"NMfYa=1;",
|
||||
"nm_mobile=0;",
|
||||
)
|
||||
}
|
||||
|
||||
/**
 * Returns one catalog page, built from "table.news_pic2" rows.
 *
 * One of three listing URLs is used: text search (query wins over tags),
 * a tag listing (tag keys joined with '_'), or the full catalog sorted by
 * [sortOrder]. Rows missing the expected link or info cell are skipped.
 */
override suspend fun getList(
    offset: Int,
    query: String?,
    tags: Set<MangaTag>?,
    sortOrder: SortOrder?,
): List<Manga> {
    val domain = getDomain()
    val url = when {
        !query.isNullOrEmpty() -> "https://$domain/search?stext=${query.urlEncoded()}&rowstart=$offset"
        !tags.isNullOrEmpty() -> tags.joinToString(
            separator = "_",
            prefix = "https://$domain/tags/",
            postfix = "&rowstart=$offset",
            transform = { it.key.urlEncoded() },
        )
        else -> "https://$domain/all_manga?${getSortKey(sortOrder)}&rowstart=$offset"
    }
    val doc = context.httpGet(url).parseHtml()
    // The site serves two alternative page layouts; try both containers.
    val root = doc.body().run {
        selectFirst("td.main-bg") ?: selectFirst("td.main-body")
    } ?: parseFailed("Cannot find root")
    return root.select("table.news_pic2").mapNotNull { row ->
        val a = row.selectFirst("td.bg_style1")?.selectFirst("a")
            ?: return@mapNotNull null
        val href = a.relUrl("href")
        // Title cell format is "alt / title"; split below.
        val title = a.selectFirst("h2")?.text().orEmpty()
        val info = row.selectFirst("td[width=100%]") ?: return@mapNotNull null
        Manga(
            id = generateUid(href),
            url = href,
            title = title.substringAfter(" / "),
            altTitle = title.substringBefore(" / ", "")
                .takeUnless { it.isBlank() },
            author = info.getElementsContainingOwnText("Автор:").firstOrNull()
                ?.nextElementSibling()?.ownText(),
            coverUrl = row.selectFirst("img.news_pic2")?.absUrl("data-src")
                .orEmpty(),
            tags = row.selectFirst("span.tag-links")?.select("a")
                ?.mapToSet {
                    MangaTag(
                        title = it.text().toTitleCase(),
                        key = it.attr("href").substringAfterLast('/'),
                        source = source,
                    )
                }.orEmpty(),
            source = source,
            publicUrl = a.absUrl("href"),
            rating = Manga.NO_RATING,
            // The whole source is adult-only, so every entry is flagged NSFW.
            isNsfw = true,
            description = row.selectFirst("div.description")?.html(),
            state = null,
        )
    }
}
|
||||
|
||||
/**
 * Loads the title page and enriches [manga] with cover, description, tags,
 * author, and a single synthetic chapter (the site has one chapter per title).
 */
override suspend fun getDetails(manga: Manga): Manga {
    val body = context.httpGet(manga.url.withDomain()).parseHtml().body()
    val root = body.selectFirst("table.shoutbox")
        ?: parseFailed("Cannot find root")
    val info = root.select("div.tbl2")
    // The last info block carries author/translator/date details.
    val lastInfo = info.last()
    return manga.copy(
        largeCoverUrl = body.selectFirst("img.news_pic2")?.absUrl("src"),
        description = info.select("div.blockquote").lastOrNull()?.html() ?: manga.description,
        // Merge the detail-page tags with whatever the list page already provided.
        tags = info.select("span.tag-links").firstOrNull()?.select("a")?.mapToSet {
            MangaTag(
                title = it.text().toTitleCase(),
                key = it.attr("href").substringAfterLast('/'),
                source = source,
            )
        }?.plus(manga.tags) ?: manga.tags,
        author = lastInfo?.getElementsByAttributeValueContaining("href", "mangaka/")?.text()
            ?: manga.author,
        chapters = listOf(
            // Single-chapter title: the chapter mirrors the manga itself.
            MangaChapter(
                id = manga.id,
                url = manga.url,
                source = source,
                number = 1,
                name = manga.title,
                scanlator = lastInfo?.getElementsByAttributeValueContaining("href", "perevod/")?.text(),
                uploadDate = lastInfo?.getElementsContainingOwnText("Дата:")
                    ?.firstOrNull()
                    ?.html()
                    ?.parseDate() ?: 0L,
                branch = null,
            ),
        ),
    )
}
|
||||
|
||||
/**
 * Builds the page list by pairing the preview anchors on the chapter page
 * with the full-size URLs scraped from the reader's inline script.
 *
 * Pages without a matching entry in the script's images[] array are dropped.
 * NOTE(review): assumes the anchors appear in the same order as the script's
 * image indices — confirm against the site markup.
 */
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
    val fullUrl = chapter.url.withDomain()
    val doc = context.httpGet(fullUrl).parseHtml()
    val root = doc.body().selectFirst("td.main-body")
        ?: parseFailed("Cannot find root")
    // Link into the online reader, where the full-size image list lives.
    val readlink = root.selectFirst("table.shoutbox")?.selectFirst("a")?.absUrl("href")
        ?: parseFailed("Cannot obtain read link")
    val fullPages = getFullPages(readlink)
    return root.getElementsByAttributeValueMatching("href", pageUrlPatter).mapIndexedNotNull { i, a ->
        val url = a.relUrl("href")
        MangaPage(
            id = generateUid(url),
            url = fullPages[i] ?: return@mapIndexedNotNull null,
            referer = fullUrl,
            preview = a.selectFirst("img")?.absUrl("src"),
            source = source,
        )
    }
}
|
||||
|
||||
/**
 * Scrapes the tag cloud from the catalog page.
 *
 * The cloud has no stable id, so it is located by walking up from the
 * "Поиск манги по тегам" label to the enclosing tbody and drilling back down —
 * fragile by nature, hence the explicit failure when not found.
 */
override suspend fun getTags(): Set<MangaTag> {
    val domain = getDomain()
    val doc = context.httpGet("https://$domain/all_manga").parseHtml()
    val root = doc.body().getElementsContainingOwnText("Поиск манги по тегам")
        .firstOrNull()?.parents()?.find { it.tag().normalName() == "tbody" }
        ?.selectFirst("td.textbox")?.selectFirst("td.small")
        ?: parseFailed("Tags root not found")
    return root.select("a").mapToSet {
        MangaTag(
            title = it.text().toTitleCase(),
            // Tag links may end with '+' (multi-word markers); strip it from the key.
            key = it.attr("href").substringAfterLast('/')
                .removeSuffix("+"),
            source = source,
        )
    }
}
|
||||
|
||||
/**
 * Returns the logged-in username, taken from the profile link on the home
 * page. Throws [AuthRequiredException] when a login form is present instead,
 * or [ParseException] when neither is found (layout change).
 */
override suspend fun getUsername(): String {
    val page = context.httpGet("https://${getDomain()}/").parseHtml().body()
    val profileLink = page
        .getElementsContainingOwnText("Профиль")
        .firstOrNull()
    if (profileLink != null) {
        return profileLink.attr("href").substringAfterLast('/')
    }
    // No profile link: distinguish "not logged in" from a broken layout.
    throw if (page.selectFirst("form[name=\"loginform\"]") != null) {
        AuthRequiredException(source)
    } else {
        ParseException("Cannot find username")
    }
}
|
||||
|
||||
/**
 * Extracts the full-size image URLs from the reader page's inline script.
 *
 * Scans each script for `images[i].src = '...'` assignments and returns a
 * sparse index→URL map built from the first script that contains any; fails
 * when no script matches.
 */
private suspend fun getFullPages(url: String): SparseArrayCompat<String> {
    val scripts = context.httpGet(url).parseHtml().select("script")
    val regex = "images\\[(\\d+)].src = '([^']+)'".toRegex()
    for (script in scripts) {
        val src = script.html()
        if (src.isEmpty()) {
            continue
        }
        val matches = regex.findAll(src).toList()
        if (matches.isEmpty()) {
            continue
        }
        // Indices come from the script, so the array may have gaps.
        val result = SparseArrayCompat<String>(matches.size)
        matches.forEach { match ->
            val (index, link) = match.destructured
            result.append(index.toInt(), link)
        }
        return result
    }
    parseFailed("Cannot find pages list")
}
|
||||
|
||||
// Maps a SortOrder to the site's query value; a null order falls back to the
// first declared order. RATING and any unexpected value both map to "like".
private fun getSortKey(sortOrder: SortOrder?): String {
    val effectiveOrder = sortOrder ?: sortOrders.minByOrNull { it.ordinal }
    return when (effectiveOrder) {
        SortOrder.NEWEST -> "date"
        SortOrder.POPULARITY -> "views"
        else -> "like"
    }
}
|
||||
|
||||
// Extracts the text between "Дата:" and the next tag and parses it as a
// Russian-locale "d MMMM yyyy" date; returns 0 when the marker is absent.
private fun String.parseDate(): Long {
    val raw = substringBetweenFirst("Дата:", "<")?.trim() ?: return 0
    return SimpleDateFormat("d MMMM yyyy", Locale("ru")).tryParse(raw)
}
|
||||
}
|
||||
@ -0,0 +1,10 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
|
||||
/**
 * ReadManga source. The site runs the Grouple engine, so all parsing logic
 * lives in [GroupleParser]; this class only binds the source id and domain.
 */
internal class ReadmangaParser(override val context: MangaLoaderContext) : GroupleParser() {

    override val source: MangaSource = MangaSource.READMANGA_RU
    override val defaultDomain: String = "readmanga.io"
}
|
||||
@ -0,0 +1,262 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import okhttp3.Headers
|
||||
import org.json.JSONArray
|
||||
import org.json.JSONException
|
||||
import org.json.JSONObject
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.MangaParser
|
||||
import org.koitharu.kotatsu.parsers.MangaParserAuthProvider
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.*
|
||||
import org.koitharu.kotatsu.parsers.util.*
|
||||
import org.koitharu.kotatsu.parsers.util.json.getStringOrNull
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSON
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSONTo
|
||||
import org.koitharu.kotatsu.parsers.util.json.mapJSONToSet
|
||||
import java.net.URLDecoder
|
||||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
import java.util.*
|
||||
|
||||
private const val PAGE_SIZE = 30
|
||||
private const val STATUS_ONGOING = 1
|
||||
private const val STATUS_FINISHED = 0
|
||||
|
||||
internal class RemangaParser(override val context: MangaLoaderContext) : MangaParser(), MangaParserAuthProvider {
|
||||
|
||||
override val source = MangaSource.REMANGA
|
||||
|
||||
override val defaultDomain = "remanga.org"
|
||||
override val authUrl: String
|
||||
get() = "https://${getDomain()}/user/login"
|
||||
|
||||
override val sortOrders: Set<SortOrder> = EnumSet.of(
|
||||
SortOrder.UPDATED,
|
||||
SortOrder.POPULARITY,
|
||||
SortOrder.RATING,
|
||||
SortOrder.NEWEST,
|
||||
)
|
||||
|
||||
override val isAuthorized: Boolean
|
||||
get() {
|
||||
return context.cookieJar.getCookies(getDomain()).any {
|
||||
it.name == "user"
|
||||
}
|
||||
}
|
||||
|
||||
private val regexLastUrlPath = Regex("/[^/]+/?$")
|
||||
|
||||
/**
 * Returns one page of the API catalog (or search results when [query] is set).
 *
 * Auth cookies are mirrored to the api subdomain first. Pagination is
 * 1-based, PAGE_SIZE items per page.
 */
override suspend fun getList(
    offset: Int,
    query: String?,
    tags: Set<MangaTag>?,
    sortOrder: SortOrder?,
): List<Manga> {
    copyCookies()
    val domain = getDomain()
    val urlBuilder = StringBuilder()
        .append("https://api.")
        .append(domain)
    if (query != null) {
        // Search endpoint ignores sorting and tag filters.
        urlBuilder.append("/api/search/?query=")
            .append(query.urlEncoded())
    } else {
        urlBuilder.append("/api/search/catalog/?ordering=")
            .append(getSortKey(sortOrder))
        tags?.forEach { tag ->
            urlBuilder.append("&genres=")
            urlBuilder.append(tag.key)
        }
    }
    urlBuilder
        .append("&page=")
        .append((offset / PAGE_SIZE) + 1)
        .append("&count=")
        .append(PAGE_SIZE)
    val content = context.httpGet(urlBuilder.toString(), getApiHeaders()).parseJson()
        .getJSONArray("content")
    return content.mapJSON { jo ->
        val url = "/manga/${jo.getString("dir")}"
        val img = jo.getJSONObject("img")
        Manga(
            id = generateUid(url),
            url = url,
            publicUrl = "https://$domain$url",
            title = jo.getString("rus_name"),
            altTitle = jo.getString("en_name"),
            // API rating is 0..10; normalize to the 0..1 scale used by Manga.
            rating = jo.getString("avg_rating").toFloatOrNull()?.div(10f) ?: Manga.NO_RATING,
            coverUrl = "https://api.$domain${img.getString("mid")}",
            largeCoverUrl = "https://api.$domain${img.getString("high")}",
            author = null,
            tags = jo.optJSONArray("genres")?.mapJSONToSet { g ->
                MangaTag(
                    title = g.getString("name").toTitleCase(),
                    key = g.getInt("id").toString(),
                    source = MangaSource.REMANGA,
                )
            }.orEmpty(),
            source = MangaSource.REMANGA,
        )
    }
}
|
||||
|
||||
/**
 * Loads full title details from the API and attaches the chapter list of the
 * first translation branch.
 *
 * Chapter display names are built as "Том X. Глава Y[ - name]" and the list
 * is reversed so that numbering ascends from the oldest chapter.
 */
override suspend fun getDetails(manga: Manga): Manga {
    copyCookies()
    val domain = getDomain()
    // The slug is the last path segment of the stored relative URL.
    val slug = manga.url.find(regexLastUrlPath)
        ?: throw ParseException("Cannot obtain slug from ${manga.url}")
    val data = context.httpGet(
        url = "https://api.$domain/api/titles/$slug/",
        headers = getApiHeaders(),
    ).parseJson()
    val content = try {
        data.getJSONObject("content")
    } catch (e: JSONException) {
        // On error the API returns a "msg" field instead of "content".
        throw ParseException(data.optString("msg"), e)
    }
    // Only the first translation branch is used.
    val branchId = content.getJSONArray("branches").optJSONObject(0)
        ?.getLong("id") ?: throw ParseException("No branches found")
    val chapters = grabChapters(domain, branchId)
    val dateFormat = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US)
    return manga.copy(
        description = content.getString("description"),
        state = when (content.optJSONObject("status")?.getInt("id")) {
            STATUS_ONGOING -> MangaState.ONGOING
            STATUS_FINISHED -> MangaState.FINISHED
            else -> null
        },
        tags = content.getJSONArray("genres").mapJSONToSet { g ->
            MangaTag(
                title = g.getString("name").toTitleCase(),
                key = g.getInt("id").toString(),
                source = MangaSource.REMANGA,
            )
        },
        chapters = chapters.mapIndexed { i, jo ->
            val id = jo.getLong("id")
            val name = jo.getString("name").toTitleCase(Locale.ROOT)
            val publishers = jo.optJSONArray("publishers")
            MangaChapter(
                id = generateUid(id),
                url = "/api/titles/chapters/$id/",
                // Server order is assumed newest-first; invert for ascending numbers.
                number = chapters.size - i,
                name = buildString {
                    append("Том ")
                    append(jo.optString("tome", "0"))
                    append(". ")
                    append("Глава ")
                    append(jo.optString("chapter", "0"))
                    if (name.isNotEmpty()) {
                        append(" - ")
                        append(name)
                    }
                },
                uploadDate = dateFormat.tryParse(jo.getString("upload_date")),
                scanlator = publishers?.optJSONObject(0)?.getStringOrNull("name"),
                source = MangaSource.REMANGA,
                branch = null,
            )
        }.asReversed(),
    )
}
|
||||
|
||||
/**
 * Loads a chapter's page list from the API.
 *
 * When "pages" is absent the chapter is paid/scheduled: if a future
 * "pub_date" is present, fail with the availability date, otherwise with a
 * generic "unavailable" message. A page entry may be a single object or an
 * array of slices — both are flattened into the result.
 */
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
    val referer = "https://${getDomain()}/"
    val content = context.httpGet(chapter.url.withDomain(subdomain = "api"), getApiHeaders()).parseJson()
        .getJSONObject("content")
    val pages = content.optJSONArray("pages")
    if (pages == null) {
        val pubDate = content.getStringOrNull("pub_date")?.let {
            SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US).tryParse(it)
        }
        if (pubDate != null && pubDate > System.currentTimeMillis()) {
            val at = SimpleDateFormat.getDateInstance(DateFormat.LONG).format(Date(pubDate))
            parseFailed("Глава станет доступной $at")
        } else {
            parseFailed("Глава недоступна")
        }
    }
    val result = ArrayList<MangaPage>(pages.length())
    for (i in 0 until pages.length()) {
        when (val item = pages.get(i)) {
            is JSONObject -> result += parsePage(item, referer)
            is JSONArray -> item.mapJSONTo(result) { parsePage(it, referer) }
            else -> throw ParseException("Unknown json item $item")
        }
    }
    return result
}
|
||||
|
||||
/**
 * Fetches the genre list from the API forms endpoint and maps every entry to
 * a [MangaTag] keyed by its numeric id.
 */
override suspend fun getTags(): Set<MangaTag> {
    val url = "https://api.${getDomain()}/api/forms/titles/?get=genres"
    val genres = context.httpGet(url, getApiHeaders())
        .parseJson()
        .getJSONObject("content")
        .getJSONArray("genres")
    return genres.mapJSONToSet { genre ->
        MangaTag(
            title = genre.getString("name").toTitleCase(),
            key = genre.getInt("id").toString(),
            source = source,
        )
    }
}
|
||||
|
||||
/**
 * Returns the username of the currently authorized account, as reported by
 * the API's "current user" endpoint.
 */
override suspend fun getUsername(): String {
    val response = context.httpGet(
        url = "https://api.${getDomain()}/api/users/current/",
        headers = getApiHeaders(),
    ).parseJson()
    return response.getJSONObject("content").getString("username")
}
|
||||
|
||||
/**
 * Builds the bearer-token auth header from the URL-encoded JSON stored in the
 * "user" cookie, or null when the user is not logged in (or the cookie lacks
 * an access token).
 */
private fun getApiHeaders(): Headers? {
    val userCookie = context.cookieJar.getCookies(getDomain())
        .find { it.name == "user" }
        ?: return null
    val payload = JSONObject(URLDecoder.decode(userCookie.value, Charsets.UTF_8.name()))
    val accessToken = payload.getStringOrNull("access_token") ?: return null
    return Headers.headersOf("authorization", "bearer $accessToken")
}
|
||||
|
||||
// The API lives on the "api." subdomain; mirror auth cookies there so that
// API requests are authorized.
private fun copyCookies() {
    val domain = getDomain()
    val apiDomain = "api.$domain"
    context.cookieJar.copyCookies(domain, apiDomain)
}
|
||||
|
||||
// Maps a SortOrder to the API's ordering parameter; UPDATED and null both
// fall back to sorting by latest chapter date.
private fun getSortKey(order: SortOrder?): String = when (order) {
    SortOrder.POPULARITY -> "-rating"
    SortOrder.RATING -> "-votes"
    SortOrder.NEWEST -> "-id"
    else -> "-chapter_date"
}
|
||||
|
||||
// Converts a single API page object ("id" + "link") into a MangaPage.
private fun parsePage(jo: JSONObject, referer: String): MangaPage {
    val pageId = jo.getLong("id")
    return MangaPage(
        id = generateUid(pageId),
        url = jo.getString("link"),
        preview = null,
        referer = referer,
        source = source,
    )
}
|
||||
|
||||
/**
 * Pages through the chapters API for [branchId], 100 entries per request,
 * until an empty page is returned, and collects the raw chapter JSON objects
 * in server order.
 */
private suspend fun grabChapters(domain: String, branchId: Long): List<JSONObject> {
    val result = ArrayList<JSONObject>(100)
    var page = 1
    while (true) {
        val content = context.httpGet(
            url = "https://api.$domain/api/titles/chapters/?branch_id=$branchId&page=$page&count=100",
            headers = getApiHeaders(),
        ).parseJson().getJSONArray("content")
        val len = content.length()
        if (len == 0) {
            // An empty page marks the end of the list.
            break
        }
        result.ensureCapacity(result.size + len)
        for (i in 0 until len) {
            result.add(content.getJSONObject(i))
        }
        page++
    }
    return result
}
|
||||
}
|
||||
@ -0,0 +1,10 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
|
||||
/**
 * SelfManga source. The site runs the Grouple engine, so all parsing logic
 * lives in [GroupleParser]; this class only binds the source id and domain.
 */
internal class SelfMangaParser(override val context: MangaLoaderContext) : GroupleParser() {

    override val source: MangaSource = MangaSource.SELFMANGA
    override val defaultDomain: String = "selfmanga.live"
}
|
||||
@ -0,0 +1,40 @@
|
||||
package org.koitharu.kotatsu.parsers.site
|
||||
|
||||
import org.koitharu.kotatsu.parsers.MangaLoaderContext
|
||||
import org.koitharu.kotatsu.parsers.exception.ParseException
|
||||
import org.koitharu.kotatsu.parsers.model.Manga
|
||||
import org.koitharu.kotatsu.parsers.model.MangaChapter
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.util.parseHtml
|
||||
import org.koitharu.kotatsu.parsers.util.relUrl
|
||||
|
||||
/**
 * Yaoi-chan source. Inherits the generic Chan-engine behavior from
 * [ChanParser] and only overrides details parsing, whose markup differs from
 * the other Chan sites.
 */
internal class YaoiChanParser(override val context: MangaLoaderContext) : ChanParser() {

    override val source = MangaSource.YAOICHAN
    override val defaultDomain = "yaoi-chan.me"

    /**
     * Loads the title page and fills description, large cover and the chapter
     * list. Chapters are listed newest-first on the site, so the list is
     * reversed to give ascending chapter numbers.
     */
    override suspend fun getDetails(manga: Manga): Manga {
        val doc = context.httpGet(manga.url.withDomain()).parseHtml()
        val root =
            doc.body().getElementById("dle-content") ?: throw ParseException("Cannot find root")
        return manga.copy(
            // Trailing markup after the last "<div" is site chrome, not description.
            description = root.getElementById("description")?.html()?.substringBeforeLast("<div"),
            largeCoverUrl = root.getElementById("cover")?.absUrl("src"),
            chapters = root.select("table.table_cha").flatMap { table ->
                table.select("div.manga")
            }.mapNotNull { it.selectFirst("a") }.reversed().mapIndexed { i, a ->
                val href = a.relUrl("href")
                MangaChapter(
                    id = generateUid(href),
                    name = a.text().trim(),
                    number = i + 1,
                    url = href,
                    // The chapter table exposes no date information.
                    uploadDate = 0L,
                    source = source,
                    scanlator = null,
                    branch = null,
                )
            },
        )
    }
}
|
||||
@ -0,0 +1,58 @@
|
||||
package org.koitharu.kotatsu.parsers.util
|
||||
|
||||
import androidx.collection.ArrayMap
|
||||
import androidx.collection.ArraySet
|
||||
import java.util.*
|
||||
|
||||
/**
 * Replaces the entire contents of this collection with the elements of [subject].
 */
fun <T> MutableCollection<T>.replaceWith(subject: Iterable<T>) {
	clear()
	for (element in subject) {
		add(element)
	}
}

/**
 * Returns the middle element of the list (upper median for even sizes),
 * or null when the list is empty.
 */
fun <T> List<T>.medianOrNull(): T? =
	if (isEmpty()) null else this[(size / 2).coerceIn(indices)]
|
||||
|
||||
/** Maps every element with [transform], collecting results into an [ArraySet] (deduplicated). */
inline fun <T, R> Collection<T>.mapToSet(transform: (T) -> R): Set<R> {
	val result = ArraySet<R>(size)
	for (element in this) {
		result.add(transform(element))
	}
	return result
}

/** Like [mapToSet], but elements for which [transform] returns null are skipped. */
inline fun <T, R> Collection<T>.mapNotNullToSet(transform: (T) -> R?): Set<R> {
	val result = ArraySet<R>(size)
	for (element in this) {
		val mapped = transform(element) ?: continue
		result.add(mapped)
	}
	return result
}
|
||||
|
||||
/** Maps this array into a new typed array of the same size. */
inline fun <T, reified R> Array<T>.mapToArray(transform: (T) -> R): Array<R> =
	Array(size) { index -> transform(this[index]) }
|
||||
|
||||
/** Collects key/value pairs into a mutable ArrayMap; later duplicates of a key overwrite earlier ones. */
fun <K, V> List<Pair<K, V>>.toMutableMap(): MutableMap<K, V> = toMap(ArrayMap(size))

/** Returns the [Enum.name] of every constant, preserving declaration order. */
fun <T : Enum<T>> Array<T>.names() = mapToArray { it.name }
|
||||
|
||||
/**
 * Moves the element at [sourceIndex] to [targetIndex], shifting the elements
 * in between by one position. Implemented via [Collections.rotate] on a sub-list,
 * which touches only the affected range.
 */
fun <T> MutableList<T>.move(sourceIndex: Int, targetIndex: Int) {
	when {
		sourceIndex <= targetIndex -> Collections.rotate(subList(sourceIndex, targetIndex + 1), -1)
		else -> Collections.rotate(subList(targetIndex, sourceIndex + 1), 1)
	}
}
|
||||
|
||||
/**
 * Returns true when both lists have the same size and every pair of elements
 * at the same index satisfies [equals].
 */
inline fun <T> List<T>.areItemsEquals(other: List<T>, equals: (T, T) -> Boolean): Boolean {
	if (size != other.size) {
		return false
	}
	for (index in indices) {
		if (!equals(this[index], other[index])) {
			return false
		}
	}
	return true
}

/** Returns the next element, or null when the iterator is exhausted. */
fun <T> Iterator<T>.nextOrNull(): T? = when {
	hasNext() -> next()
	else -> null
}
|
||||
@ -0,0 +1,40 @@
|
||||
package org.koitharu.kotatsu.parsers.util
|
||||
|
||||
import okhttp3.Cookie
|
||||
import okhttp3.CookieJar
|
||||
import okhttp3.HttpUrl
|
||||
|
||||
private const val SCHEME_HTTPS = "https"

/** Stores the given raw "Set-Cookie"-style [cookies] for [domain]; unparseable entries are skipped. */
fun CookieJar.insertCookies(domain: String, vararg cookies: String) {
	val url = httpsUrlOf(domain)
	saveFromResponse(url, cookies.mapNotNull { Cookie.parse(url, it) })
}

/** Returns all cookies the jar would attach to an https request to [domain]. */
fun CookieJar.getCookies(domain: String): List<Cookie> = loadForRequest(httpsUrlOf(domain))

/**
 * Copies cookies from [oldDomain] to [newDomain].
 * When [names] is non-null, only cookies with those names are copied.
 */
fun CookieJar.copyCookies(oldDomain: String, newDomain: String, names: Array<String>? = null) {
	var cookies = loadForRequest(httpsUrlOf(oldDomain))
	if (names != null) {
		cookies = cookies.filter { cookie -> cookie.name in names }
	}
	saveFromResponse(httpsUrlOf(newDomain), cookies)
}

/** Builds a plain https [HttpUrl] with [domain] as host (no path or query). */
private fun httpsUrlOf(domain: String): HttpUrl = HttpUrl.Builder()
	.scheme(SCHEME_HTTPS)
	.host(domain)
	.build()
|
||||
@ -0,0 +1,36 @@
|
||||
package org.koitharu.kotatsu.parsers.util
|
||||
|
||||
import kotlinx.coroutines.suspendCancellableCoroutine
|
||||
import okhttp3.Call
|
||||
import okhttp3.Callback
|
||||
import okhttp3.Response
|
||||
import java.io.IOException
|
||||
import kotlin.coroutines.resume
|
||||
import kotlin.coroutines.resumeWithException
|
||||
|
||||
/**
 * Suspends until this [Call] completes, resuming with the [Response] or
 * resuming with the [IOException] on failure.
 * Cancelling the coroutine cancels the underlying HTTP call.
 */
suspend fun Call.await() = suspendCancellableCoroutine<Response> { cont ->
	this.enqueue(
		object : Callback {
			override fun onFailure(call: Call, e: IOException) {
				// Guard: never resume a continuation that was already cancelled.
				if (cont.isActive) {
					cont.resumeWithException(e)
				}
			}

			override fun onResponse(call: Call, response: Response) {
				if (cont.isActive) {
					cont.resume(response)
				}
			}
		},
	)
	// Propagate coroutine cancellation to the in-flight OkHttp call.
	cont.invokeOnCancellation {
		this.cancel()
	}
}
|
||||
|
||||
/** "type/subtype" of the response body's Content-Type, or null when there is no body or no media type. */
val Response.mimeType: String?
	get() = body?.contentType()?.run { "$type/$subtype" }

/** Raw Content-Disposition header value, or null when the header is absent. */
val Response.contentDisposition: String?
	get() = header("Content-Disposition")
|
||||
@ -0,0 +1,109 @@
|
||||
package org.koitharu.kotatsu.parsers.util
|
||||
|
||||
import okhttp3.Response
|
||||
import okhttp3.internal.closeQuietly
|
||||
import org.json.JSONArray
|
||||
import org.json.JSONObject
|
||||
import org.jsoup.Jsoup
|
||||
import org.jsoup.internal.StringUtil
|
||||
import org.jsoup.nodes.Document
|
||||
import org.jsoup.nodes.Element
|
||||
import org.jsoup.nodes.Node
|
||||
import org.jsoup.select.Elements
|
||||
import java.text.DateFormat
|
||||
|
||||
/**
 * Parses the response body as an HTML [Document] and always closes the response.
 *
 * The charset from the Content-Type header is honoured when present; otherwise
 * Jsoup falls back to its own charset detection.
 *
 * @throws NullPointerException when the response has no body.
 */
fun Response.parseHtml(): Document {
	try {
		// Single null-checked local instead of the previous `body!!` re-dereference.
		val responseBody = body ?: throw NullPointerException("Response body is null")
		responseBody.byteStream().use { stream ->
			val charset = responseBody.contentType()?.charset()?.name()
			return Jsoup.parse(
				stream,
				charset,
				request.url.toString(),
			)
		}
	} finally {
		closeQuietly()
	}
}
|
||||
|
||||
/** Parses the response body as a [JSONObject] and always closes the response. */
fun Response.parseJson(): JSONObject = try {
	JSONObject(body?.string() ?: throw NullPointerException("Response body is null"))
} finally {
	closeQuietly()
}

/** Parses the response body as a [JSONArray] and always closes the response. */
fun Response.parseJsonArray(): JSONArray = try {
	JSONArray(body?.string() ?: throw NullPointerException("Response body is null"))
} finally {
	closeQuietly()
}
|
||||
|
||||
/** Returns the first element's own text (children excluded) matching [predicate], or null. */
inline fun Elements.findOwnText(predicate: (String) -> Boolean): String? {
	for (element in this) {
		val text = element.ownText()
		if (predicate(text)) {
			return text
		}
	}
	return null
}

/** Returns the first element's combined text (children included) matching [predicate], or null. */
inline fun Elements.findText(predicate: (String) -> Boolean): String? {
	for (element in this) {
		val text = element.text()
		if (predicate(text)) {
			return text
		}
	}
	return null
}
|
||||
|
||||
/** Resolves this (possibly relative) URL against [node]'s base URI; an empty string stays empty. */
fun String.inContextOf(node: Node): String = when {
	isEmpty() -> ""
	else -> StringUtil.resolve(node.baseUri(), this)
}
|
||||
|
||||
/**
 * Strips the scheme and [domain] prefix from an absolute URL, producing a relative one.
 * Already-relative or empty strings are returned unchanged; URLs on other hosts
 * are left as-is.
 *
 * Fix: the previous pattern had a stray `+` right after `Regex.escape(domain)`.
 * Since escape() quotes with \Q..\E, that quantifier applied to the domain's
 * last character, so e.g. "example.commm" also matched.
 */
fun String.toRelativeUrl(domain: String): String {
	if (isEmpty() || startsWith("/")) {
		return this
	}
	return replace(Regex("^[^/]{2,6}://${Regex.escape(domain)}/", RegexOption.IGNORE_CASE), "/")
}
|
||||
|
||||
/**
 * Returns the value of [attributeKey] as a URL relative to this element's base URI.
 * Absolute values have the "scheme://host" origin stripped when a base origin
 * can be extracted; otherwise the value is returned unchanged.
 */
fun Element.relUrl(attributeKey: String): String {
	val value = attr(attributeKey).trim()
	// Empty and already-relative values need no rewriting.
	if (value.isEmpty() || value.startsWith("/")) {
		return value
	}
	val base = REGEX_URL_BASE.find(baseUri())?.value ?: return value
	// base ends with '/'; drop it so the result keeps its leading slash.
	return value.removePrefix(base.dropLast(1))
}

// Matches "scheme://host/" at the start of a URL.
private val REGEX_URL_BASE = Regex("^[^/]{2,6}://[^/]+/", RegexOption.IGNORE_CASE)
|
||||
|
||||
/**
 * Extracts the value of a single CSS [property] from the inline `style` attribute,
 * e.g. css("width") on style="width: 10px;" returns "10px".
 * Returns null when the property is not present.
 *
 * Fix: the match must come from Regex.find(input) — the previous
 * `attr("style").find(regex)` is not a stdlib String/Regex call.
 */
fun Element.css(property: String): String? {
	val regex = Regex("${Regex.escape(property)}\\s*:\\s*[^;]+")
	val declaration = regex.find(attr("style"))?.value ?: return null
	return declaration.substringAfter(':').removeSuffix(';').trim()
}
|
||||
|
||||
/** Parses [str] into epoch milliseconds, returning 0 for null, empty or unparseable input. */
fun DateFormat.tryParse(str: String?): Long = when {
	str.isNullOrEmpty() -> 0L
	else -> runCatching { parse(str)?.time }.getOrNull() ?: 0L
}
|
||||
@ -0,0 +1,44 @@
|
||||
package org.koitharu.kotatsu.parsers.util
|
||||
|
||||
import java.text.DecimalFormat
|
||||
import java.text.NumberFormat
|
||||
import java.util.*
|
||||
|
||||
/**
 * Formats this number with [decimals] fraction digits, [decPoint] as the
 * decimal separator and [thousandsSep] as the grouping separator
 * (pass null to disable grouping entirely).
 */
fun Number.format(decimals: Int = 0, decPoint: Char = '.', thousandsSep: Char? = ' '): String {
	val formatter = NumberFormat.getInstance(Locale.US) as DecimalFormat
	val symbols = formatter.decimalFormatSymbols.apply {
		decimalSeparator = decPoint
		if (thousandsSep != null) {
			groupingSeparator = thousandsSep
		}
	}
	formatter.isGroupingUsed = thousandsSep != null
	formatter.decimalFormatSymbols = symbols
	formatter.minimumFractionDigits = decimals
	formatter.maximumFractionDigits = decimals
	// Floating-point values keep their fractional part; all other Number types format as a long.
	return if (this is Float || this is Double) {
		formatter.format(toDouble())
	} else {
		formatter.format(toLong())
	}
}
|
||||
|
||||
/**
 * Rounds this value up to the nearest integer (ceiling).
 *
 * Fix: the previous implementation added 1 whenever a fractional part was
 * present, which is wrong for negative values (-1.5 produced 0 instead of -1).
 */
fun Float.toIntUp(): Int = kotlin.math.ceil(this).toInt()
|
||||
|
||||
/**
 * Rounds this value up to the nearest multiple of [step].
 * NOTE(review): assumes a non-negative receiver and positive step — for
 * negative receivers the result overshoots; confirm against call sites.
 */
infix fun Int.upBy(step: Int): Int {
	val remainder = this % step
	return if (remainder == 0) this else this - remainder + step
}
|
||||
@ -0,0 +1,14 @@
|
||||
package org.koitharu.kotatsu.utils.json
|
||||
|
||||
import org.json.JSONArray
|
||||
import org.json.JSONObject
|
||||
|
||||
/**
 * Iterates over a [JSONArray], yielding each element as a [JSONObject].
 * The array length is captured once; the array must not change during iteration.
 */
class JSONIterator(private val array: JSONArray) : Iterator<JSONObject> {

	private val size = array.length()
	private var position = 0

	override fun hasNext() = position < size

	override fun next(): JSONObject = array.getJSONObject(position++)
}
|
||||
@ -0,0 +1,13 @@
|
||||
package org.koitharu.kotatsu.utils.json
|
||||
|
||||
import org.json.JSONArray
|
||||
|
||||
/**
 * Iterates over a [JSONArray], yielding each element as a [String].
 * The array length is captured once; the array must not change during iteration.
 */
class JSONStringIterator(private val array: JSONArray) : Iterator<String> {

	private val size = array.length()
	private var position = 0

	override fun hasNext() = position < size

	override fun next(): String = array.getString(position++)
}
|
||||
@ -0,0 +1,99 @@
|
||||
package org.koitharu.kotatsu.parsers.util.json
|
||||
|
||||
import androidx.collection.ArraySet
|
||||
import org.json.JSONArray
|
||||
import org.json.JSONObject
|
||||
import org.koitharu.kotatsu.utils.json.JSONIterator
|
||||
import org.koitharu.kotatsu.utils.json.JSONStringIterator
|
||||
import org.koitharu.kotatsu.utils.json.JSONValuesIterator
|
||||
import kotlin.contracts.contract
|
||||
|
||||
/** Maps every element (as [JSONObject]) into [destination] using [block]. */
inline fun <R, C : MutableCollection<in R>> JSONArray.mapJSONTo(
	destination: C,
	block: (JSONObject) -> R,
): C {
	for (index in 0 until length()) {
		destination.add(block(getJSONObject(index)))
	}
	return destination
}

/** Like [mapJSONTo], but elements for which [block] returns null are skipped. */
inline fun <R, C : MutableCollection<in R>> JSONArray.mapJSONNotNullTo(
	destination: C,
	block: (JSONObject) -> R?,
): C {
	for (index in 0 until length()) {
		val mapped = block(getJSONObject(index)) ?: continue
		destination.add(mapped)
	}
	return destination
}
|
||||
|
||||
/** Maps every element (as [JSONObject]) into a list. */
inline fun <T> JSONArray.mapJSON(block: (JSONObject) -> T): List<T> =
	mapJSONTo(ArrayList(length()), block)

/** Maps every element (as [JSONObject]) into a list, dropping null results. */
inline fun <T> JSONArray.mapJSONNotNull(block: (JSONObject) -> T?): List<T> =
	mapJSONNotNullTo(ArrayList(length()), block)

/** Maps every element with its index into a list. */
fun <T> JSONArray.mapJSONIndexed(block: (Int, JSONObject) -> T): List<T> {
	val size = length()
	val result = ArrayList<T>(size)
	for (index in 0 until size) {
		result.add(block(index, getJSONObject(index)))
	}
	return result
}
|
||||
|
||||
/** String value of [name], or null when the key is absent, JSON-null or empty. */
fun JSONObject.getStringOrNull(name: String): String? {
	val value = opt(name) ?: return null
	if (value === JSONObject.NULL) {
		return null
	}
	return value.toString().ifEmpty { null }
}

/** Boolean value of [name], or [defaultValue] when absent or not a boolean. */
fun JSONObject.getBooleanOrDefault(name: String, defaultValue: Boolean): Boolean {
	val value = opt(name)
	return if (value is Boolean) value else defaultValue
}
|
||||
|
||||
/**
 * Long value of [name], or [defaultValue] when the key is absent,
 * JSON-null or not numeric.
 *
 * Fix: org.json stores small integer literals as [Int], so the previous
 * `as? Long` cast silently returned the default for values that fit in an Int.
 * Any [Number] is now accepted and widened.
 */
fun JSONObject.getLongOrDefault(name: String, defaultValue: Long): Long =
	(opt(name) as? Number)?.toLong() ?: defaultValue
|
||||
|
||||
/** Iterator over this array's elements as [JSONObject]s. */
fun JSONArray.JSONIterator(): Iterator<JSONObject> = JSONIterator(this)

/** Iterator over this array's elements as [String]s. */
fun JSONArray.stringIterator(): Iterator<String> = JSONStringIterator(this)
|
||||
|
||||
/** Maps every element (as [JSONObject]) into an [ArraySet], deduplicating results. */
fun <T> JSONArray.mapJSONToSet(block: (JSONObject) -> T): Set<T> {
	val size = length()
	val result = ArraySet<T>(size)
	for (index in 0 until size) {
		result.add(block(getJSONObject(index)))
	}
	return result
}

/** Iterator over this object's values (keys are not exposed). */
fun JSONObject.values(): Iterator<Any> = JSONValuesIterator(this)

/** Indexes elements by the string value of [key]; later duplicates overwrite earlier ones. */
fun JSONArray.associateByKey(key: String): Map<String, JSONObject> {
	val result = LinkedHashMap<String, JSONObject>(length())
	for (index in 0 until length()) {
		val element = getJSONObject(index)
		result[element.getString(key)] = element
	}
	return result
}
|
||||
|
||||
/**
 * Returns true when this array is null or has no elements.
 * The contract lets the compiler smart-cast the receiver to non-null
 * after a `false` result.
 *
 * Fix: `kotlin.contracts.contract` requires the ExperimentalContracts
 * opt-in — without it this declaration does not compile.
 */
@OptIn(kotlin.contracts.ExperimentalContracts::class)
fun JSONArray?.isNullOrEmpty(): Boolean {
	contract {
		returns(false) implies (this@isNullOrEmpty != null)
	}
	return this == null || this.length() == 0
}
|
||||
@ -0,0 +1,17 @@
|
||||
package org.koitharu.kotatsu.utils.json
|
||||
|
||||
import org.json.JSONObject
|
||||
|
||||
/**
 * Iterates over the values of a [JSONObject] in its key-iteration order.
 * The object must not change during iteration.
 */
class JSONValuesIterator(
	private val jo: JSONObject,
) : Iterator<Any> {

	private val keys = jo.keys()

	override fun hasNext(): Boolean = keys.hasNext()

	override fun next(): Any = jo.get(keys.next())
}
|
||||
@ -0,0 +1,27 @@
|
||||
package org.koitharu.kotatsu.parsers
|
||||
|
||||
import okhttp3.Cookie
|
||||
import okhttp3.CookieJar
|
||||
import okhttp3.HttpUrl
|
||||
|
||||
/**
 * Simple in-memory [CookieJar] keyed by (host, cookie name).
 * Not thread-safe; intended for single-threaded test use.
 *
 * Fix: expired cookies were kept in the cache forever and merely filtered
 * on every load; a server sending an already-expired cookie (the standard
 * deletion mechanism) never actually removed the old value. Expired cookies
 * are now evicted on save.
 */
class InMemoryCookieJar : CookieJar {

	private val cache = HashMap<CookieKey, Cookie>()

	override fun loadForRequest(url: HttpUrl): List<Cookie> {
		val now = System.currentTimeMillis()
		return cache.values.filter { it.matches(url) && it.expiresAt >= now }
	}

	override fun saveFromResponse(url: HttpUrl, cookies: List<Cookie>) {
		val now = System.currentTimeMillis()
		for (cookie in cookies) {
			val key = CookieKey(url.host, cookie.name)
			if (cookie.expiresAt < now) {
				// An expired cookie in a response deletes the stored one.
				cache.remove(key)
			} else {
				cache[key] = cookie
			}
		}
	}

	private data class CookieKey(
		val host: String,
		val name: String,
	)
}
|
||||
@ -0,0 +1,51 @@
|
||||
package org.koitharu.kotatsu.parsers
|
||||
|
||||
import com.koushikdutta.quack.QuackContext
|
||||
import okhttp3.CookieJar
|
||||
import okhttp3.OkHttpClient
|
||||
import okhttp3.Request
|
||||
import okhttp3.Response
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.util.await
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
/**
 * Test double for MangaLoaderContext: real OkHttp networking with an
 * in-memory cookie jar, a stubbed source config and Quack-based JS evaluation.
 */
internal class MangaLoaderContextMock : MangaLoaderContext() {

	// Mirrors the production user-agent template with Build/Locale values stubbed out.
	private val userAgent = "Kotatsu/%s (Android %s; %s; %s %s; %s)".format(
		/*BuildConfig.VERSION_NAME*/ "3.0",
		/*Build.VERSION.RELEASE*/ "r",
		/*Build.MODEL*/ "",
		/*Build.BRAND*/ "",
		/*Build.DEVICE*/ "",
		/*Locale.getDefault().language*/"en",
	)

	override val cookieJar: CookieJar = InMemoryCookieJar()

	override val httpClient: OkHttpClient = OkHttpClient.Builder()
		.cookieJar(cookieJar)
		.addInterceptor(UserAgentInterceptor(userAgent))
		.connectTimeout(20, TimeUnit.SECONDS)
		.readTimeout(60, TimeUnit.SECONDS)
		.writeTimeout(20, TimeUnit.SECONDS)
		.build()

	// Runs the script in a throw-away Quack (QuickJS) context, returning its string result.
	override suspend fun evaluateJs(script: String): String? {
		return QuackContext.create().use {
			it.evaluate(script)?.toString()
		}
	}

	// Every source gets the same pass-through config stub.
	override fun getConfig(source: MangaSource): MangaSourceConfig {
		return SourceConfigMock()
	}

	/**
	 * Performs a GET request to [url]; [builder] may customize the request
	 * (headers, method, body) before execution.
	 */
	suspend fun doRequest(url: String, builder: Request.Builder.() -> Unit): Response {
		val request = Request.Builder()
			.get()
			.url(url)
			.apply(builder)
			.build()
		return httpClient.newCall(request).await()
	}
}
|
||||
@ -0,0 +1,116 @@
|
||||
package org.koitharu.kotatsu.parsers
|
||||
|
||||
import kotlinx.coroutines.test.runTest
|
||||
import org.junit.jupiter.params.ParameterizedTest
|
||||
import org.koitharu.kotatsu.parsers.model.Manga
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
import org.koitharu.kotatsu.parsers.model.SortOrder
|
||||
import org.koitharu.kotatsu.parsers.util.medianOrNull
|
||||
import org.koitharu.kotatsu.parsers.util.mimeType
|
||||
import org.koitharu.kotatsu.test_util.isDistinct
|
||||
import org.koitharu.kotatsu.test_util.isDistinctBy
|
||||
import org.koitharu.kotatsu.test_util.isUrlAbsoulte
|
||||
|
||||
/**
 * Live-network smoke tests run against every registered [MangaSource]:
 * listing, search, tags, details and page loading.
 */
internal class MangaParserTest {

	private val context = MangaLoaderContextMock()

	@ParameterizedTest
	@MangaSources
	fun list(source: MangaSource) = runTest {
		val parser = source.newParser(context)
		val list = parser.getList(20, query = null, sortOrder = SortOrder.POPULARITY, tags = null)
		checkMangaList(list)
		assert(list.all { it.source == source })
	}

	@ParameterizedTest
	@MangaSources
	fun search(source: MangaSource) = runTest {
		val parser = source.newParser(context)
		// Search for the manga with the shortest title — least likely to be truncated by the site.
		val subject = parser.getList(20, query = null, sortOrder = SortOrder.POPULARITY, tags = null).minByOrNull {
			it.title.length
		} ?: error("No manga found")
		val list = parser.getList(offset = 0, query = subject.title, sortOrder = null, tags = null)
		assert(list.singleOrNull { it.url == subject.url && it.id == subject.id } != null) {
			"Single subject ${subject.title} not found in search results"
		}
		checkMangaList(list)
		assert(list.all { it.source == source })
	}

	@ParameterizedTest
	@MangaSources
	fun tags(source: MangaSource) = runTest {
		val parser = source.newParser(context)
		val tags = parser.getTags()
		assert(tags.isNotEmpty())
		// Tag keys and titles must be unique and non-empty.
		val keys = tags.map { it.key }
		assert(keys.isDistinct())
		assert("" !in keys)
		val titles = tags.map { it.title }
		assert(titles.isDistinct())
		assert("" !in titles)
		assert(tags.all { it.source == source })

		// Filtering by a tag must still produce a valid list.
		val list = parser.getList(offset = 0, tags = setOf(tags.last()), query = null, sortOrder = null)
		checkMangaList(list)
		assert(list.all { it.source == source })
	}

	@ParameterizedTest
	@MangaSources
	fun details(source: MangaSource) = runTest {
		val parser = source.newParser(context)
		val list = parser.getList(20, query = null, sortOrder = SortOrder.POPULARITY, tags = null)
		val manga = list[3]
		parser.getDetails(manga).apply {
			assert(!chapters.isNullOrEmpty())
			assert(publicUrl.isUrlAbsoulte())
			assert(description != null)
			assert(title.startsWith(manga.title))
			assert(this.source == source)
			// Chapter ids, numbers and names must be unique within a manga.
			val c = checkNotNull(chapters)
			assert(c.isDistinctBy { it.id })
			assert(c.isDistinctBy { it.number })
			assert(c.isDistinctBy { it.name })
			assert(c.all { it.source == source })
		}
	}

	@ParameterizedTest
	@MangaSources
	fun pages(source: MangaSource) = runTest {
		val parser = source.newParser(context)
		val list = parser.getList(20, query = null, sortOrder = SortOrder.POPULARITY, tags = null)
		val manga = list.first()
		val chapter = parser.getDetails(manga).chapters?.firstOrNull() ?: error("Chapter is null")
		val pages = parser.getPages(chapter)

		assert(pages.isNotEmpty())
		assert(pages.isDistinctBy { it.id })
		assert(pages.all { it.source == source })

		// Resolve a middle page and verify that the final url actually serves an image.
		val page = pages.medianOrNull() ?: error("No page")
		val pageUrl = parser.getPageUrl(page)
		assert(pageUrl.isNotEmpty())
		assert(pageUrl.isUrlAbsoulte())
		val pageResponse = context.doRequest(pageUrl) {
			header("Referrer", page.referer)
		}
		assert(pageResponse.isSuccessful)
		assert(pageResponse.mimeType?.startsWith("image/") == true)
	}

	// Shared invariants for any manga list returned by a parser.
	private fun checkMangaList(list: List<Manga>) {
		assert(list.isNotEmpty()) { "Manga list is empty" }
		assert(list.isDistinctBy { it.id }) { "Manga list contains duplicated ids" }
		for (item in list) {
			assert(item.url.isNotEmpty())
			assert(!item.url.isUrlAbsoulte())
			assert(item.coverUrl.isUrlAbsoulte())
			assert(item.title.isNotEmpty())
			assert(item.publicUrl.isUrlAbsoulte())
		}
	}
}
|
||||
@ -0,0 +1,7 @@
|
||||
package org.koitharu.kotatsu.parsers
|
||||
|
||||
import org.junit.jupiter.params.provider.EnumSource
|
||||
import org.koitharu.kotatsu.parsers.model.MangaSource
|
||||
|
||||
/** Parameterizes a test over every real [MangaSource]; LOCAL is excluded. */
@EnumSource(MangaSource::class, names = ["LOCAL"], mode = EnumSource.Mode.EXCLUDE)
internal annotation class MangaSources
|
||||
@ -0,0 +1,8 @@
|
||||
package org.koitharu.kotatsu.parsers
|
||||
|
||||
/** MangaSourceConfig stub that always returns the supplied defaults. */
internal class SourceConfigMock : MangaSourceConfig {

	override fun getDomain(defaultValue: String): String = defaultValue

	override fun isSslEnabled(defaultValue: Boolean): Boolean = defaultValue
}
|
||||
@ -0,0 +1,20 @@
|
||||
package org.koitharu.kotatsu.parsers
|
||||
|
||||
import okhttp3.Interceptor
|
||||
import okhttp3.Response
|
||||
|
||||
private const val HEADER_USER_AGENT = "User-Agent"

/**
 * Adds a default User-Agent header to requests that do not set one explicitly.
 */
internal class UserAgentInterceptor(
	private val userAgent: String,
) : Interceptor {

	override fun intercept(chain: Interceptor.Chain): Response {
		val original = chain.request()
		// Respect a caller-provided User-Agent; only fill in when absent.
		val outgoing = when (original.header(HEADER_USER_AGENT)) {
			null -> original.newBuilder().header(HEADER_USER_AGENT, userAgent).build()
			else -> original
		}
		return chain.proceed(outgoing)
	}
}
|
||||
@ -0,0 +1,29 @@
|
||||
package org.koitharu.kotatsu.test_util
|
||||
|
||||
import androidx.collection.ArraySet
|
||||
|
||||
// Absolute url: explicit http/https scheme followed by non-whitespace.
private val PATTERN_URL_ABSOLUTE = Regex("https?://\\S+", setOf(RegexOption.IGNORE_CASE))
// Relative url: starts with a slash, no whitespace.
private val PATTERN_URL_RELATIVE = Regex("^/\\S+", setOf(RegexOption.IGNORE_CASE))
|
||||
|
||||
/**
 * Returns true when the collection contains no duplicate elements.
 *
 * Fix: the previous trailing `set.size == size` check was dead code — once
 * every add succeeded it was always true.
 */
internal fun <T> Collection<T>.isDistinct(): Boolean {
	val seen = ArraySet<T>(size)
	for (item in this) {
		if (!seen.add(item)) {
			return false
		}
	}
	return true
}
|
||||
|
||||
/**
 * Returns true when [selector] produces a unique key for every element.
 *
 * Fix: the previous trailing `set.size == size` check was dead code — once
 * every add succeeded it was always true.
 */
internal fun <T, K> Collection<T>.isDistinctBy(selector: (T) -> K): Boolean {
	val seen = ArraySet<K>(size)
	for (item in this) {
		if (!seen.add(selector(item))) {
			return false
		}
	}
	return true
}
|
||||
|
||||
/** True when this string looks like a site-relative url ("/path..."). */
internal fun String.isUrlRelative() = matches(PATTERN_URL_RELATIVE)

/** True when this string is an absolute http(s) url. NOTE(review): the name has a typo ("Absoulte"); it is referenced by tests, so rename in a coordinated follow-up. */
internal fun String.isUrlAbsoulte() = matches(PATTERN_URL_ABSOLUTE)
|
||||
Loading…
Reference in New Issue