Compare commits

...

1086 Commits

Author SHA1 Message Date
Koitharu fe5534b006
Remove DUMMY parser 8 months ago
Sergey Shumov 6ca07aeff7
ZenManga: update site domain (#2168) 8 months ago
Koitharu 19567f9642
Update dependencies 8 months ago
Koitharu 4a854c7a23
Update docs 8 months ago
Draken 312b617b95
Cứu Truyện: Use another publicUrl for better redirection 8 months ago
Koitharu 0a0a869e88
Merge pull request #2166 from nakrovati/refactor/migrate-gradle-scripts-to-kotlin-dsl 8 months ago
Hoàng Phi Hùng 8573921243
GocTruyenTranhVui + KuroNeko: Fixes (#2165)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
8 months ago
Draken b8d3124227
Update Gradle 8.7 to 8.13 (#2133) 8 months ago
Sergey Shumov 14e23694fd
Fix ZenManga manga page url (#2164) 8 months ago
Daniil Zhuravlev 7e25002dba refactor(build): migrate to Kotlin DSL and version catalog 8 months ago
Daniil Zhuravlev a94d9bf8b8 chore: update .editorconfig 8 months ago
nakrovati 7430c7386e
Mark broken parsers and update several domains (#2163)
* chore: update outdated domains

* chore: mark broken parsers
8 months ago
Naga 5f99e694c8
MediocreToons: Fix chapter order (#2162) 8 months ago
dragonx943 25b23854d8
Góc Truyện Tranh Vui: Add source 8 months ago
dragonx943 45d39fe94d
KuroNeko: Add delay to avoid rate limit 8 months ago
dragonx943 21b63c1d77
TruyenQQ: Add delay to avoid rate limit 8 months ago
Draken f64ce0f4a2
Dâm Cô Nương: Remove redundant escape character (#2160) 8 months ago
Naga b3e7d8e8d5
SussyScan: Fix CDN url in getPages (#2156) 8 months ago
Hoàng Phi Hùng 8a147dbdd3
Update getpage for DamCoNuong parser (#2149)
* Update DamCoNuong.kt

* Update DamCoNuong.kt to fix "Can't find fallbackUrls"

* Update DamCoNuong.kt
8 months ago
Hoàng Phi Hùng 5a039acf82
Dâm Cô Nương: Fix unsupported format in getPages (#2148) 8 months ago
Draken e9469ae0e0
Dâm Cô Nương: Update domain 8 months ago
Naga 48cfc3347b
TraduccionesAmistosas: Update domain (#2145) 8 months ago
nakrovati 1566b807b7
Update domains and correct the source language (#2132) 8 months ago
Naga 8bed45dd2f
Komikcast: Fix SortOrder (#2124) 8 months ago
Waranim 36c3a88d63
ZenManga: Add source (#2123) 8 months ago
Koitharu 16f5129b69
[LibSocial] Fix Not found error 8 months ago
dragonx943 674290d029
MimiHentai: Add Broken, need to open WebView to read 8 months ago
bivaly 6cb6873fcb
GhostScan: Update site domain (#2118) 8 months ago
fuckpdf 705a684cc2
UzayManga: Fix getPages (#2086) 8 months ago
Naga de76ccf753
Weeb Central: Update tags selector (#2087) 8 months ago
Naga cbd74dd7df
AnimeSama: Add source (#2080)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
8 months ago
dragonx943 7f39798a8c
Yuri Garden: Add intercept, apply unscrambleImage for getPages 9 months ago
Naga 31999d97a0
Hentaman: Add source (#2078) 9 months ago
dragonx943 fd0df2414e
MimiHentai: Use another UserAgent key, fix query in getListPage 9 months ago
Naga a93140fc36
MediocreToons: Add source (#2077) 9 months ago
Naga 7c264b6af7
SussyScan: Fixes (#2076) 9 months ago
Naga 134656b835
InManga: Add source (#2073) 9 months ago
Naga 8964bb1584
Manhwalist.org: Update domain (#2072) 9 months ago
Naga 8bee71342b
PhenixScans: Fixes (#2071) 9 months ago
Naga a4824a3582
RaijinScans: Fixes (#2070) 9 months ago
Draken 6fb4cc608c
ci: Update dependencies for actions (#2065) 9 months ago
dragonx943 12aa4479cd
ci: Update dependencies for actions 9 months ago
Draken bfdb10d002
Dâm Cô Nương: Fix exclude tags, CSS selectors for parseMangaList (#2064) 9 months ago
Naga f61f5329e3
ArabsHentai: Add source (#2063) 9 months ago
dragonx943 0477fe0659
site/vi: Update sources domain + Favicons 9 months ago
dragonx943 6b3329436f
Làng Geek: Set CHROME_DESKTOP as default UserAgent key 9 months ago
Draken 35fd5042a7
Làng Geek: Fix CSS selector for getPages, attempt 1 9 months ago
Naga d366628a20
HeyToon: Add source (#2060) 9 months ago
Naga 5483453f35
RocksManga: Update site structure (#2059) 9 months ago
Naga 02ac1cb896
Ikiru: Update site structure (#2058) 9 months ago
dragonx943 1563783afe
ci: Use another module for Gradle, try to improve build times 9 months ago
Naga f5436977a9
MangaGg: Fix chapter list (#2056) 9 months ago
dragonx943 b58cf32b7a
ci: Upgrade actions version and split steps 9 months ago
dragonx943 38025a6b35
ci: Change Java version, use compileKotlin instead of assemble 9 months ago
Draken 96a2d9e0ac
ci: Add workflow to automatically test and compile parsers 9 months ago
Draken c9ebbfa1bf
LxManga: Throw Exception for restricted content 9 months ago
Naga 91ec95448c
MyReadingManga: Update site structure (#2054) 9 months ago
Naga b5512e7574
[KdtScan] Update domain + Fixes (#2050)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
Co-authored-by: dragonx943 <premieregirl26@gmail.com>
9 months ago
Naga 8bc7480d84
[PerfScan] Update domain + Fixes (#2047) 9 months ago
Naga db9a7161f4
[PoseidonScans] Add source (#2042)
Close #1641
9 months ago
Koitharu 74d0951d3d
[Remanga] Fix chapters parsing 9 months ago
Koitharu 91d5eff20a
[Remanga] Migrate to api v2 9 months ago
Draken 54d6b2a2bb
[Làng Geek] Add source (#2041) 9 months ago
Naga 66cd27a673
[Manhastro] Fix chapter list (#2040)
Close #1376 #1362
9 months ago
Naga 137490001a
[HuntersScan] Update domain and fixes (#2039)
Close #2038
9 months ago
Naga c85dac5e0e
[ManhuaScan] Fixes (#2037)
Close #1418 #1730 #1922
9 months ago
dragonx943 9a9102fa33
[LXManga] Small fix for scanlator 9 months ago
Naga 0d31adb20b
[HentaiRead] Fix search, tags, rating (#2036)
Close #1945
9 months ago
Draken df7f2d1a89
[LXManga] Fix state + scanlator 9 months ago
dragonx943 eaca421132
[GocTruyenTranh] Use API instead of fixed tags function 9 months ago
Naga 5f1f2df7f0
[ManhwaBreakup] Add source (#2034)
Close #1967
9 months ago
Naga ed21a5b397
[DoujinHentai] Fixes (#2030)
Close #1983
9 months ago
Draken 39703e3ebb
[Ổ Truyện] Small fixes (#2027) 9 months ago
Naga 14cff0d651
[RokariComics] Add source (#2023)
Close #2008
9 months ago
Naga af89f42251
[Komiku] Fixes (#2022)
Close #1986
9 months ago
Naga 036ce155ae
[FlameComics] Fix image loading (#2020)
Close #2014
9 months ago
Naga 177d014b13
[Kiryuu] Update domain (#2021) 9 months ago
dragonx943 7c74ff6177
[EleceedTurkiye] Fix warn 9 months ago
Naga b139586df5
[KomikIndo] Update domain and structure (#2017)
Close #723 #1466
9 months ago
dragonx943 15c7f97432
[HentaiVN.plus] Add author search 9 months ago
Naga f22c8f9e7c
[TemakiMangas] Small fixes
Close #1997
9 months ago
dragonx943 b24baf7d93
[site/vi] Update domain 9 months ago
Draken 98e0f6cd6c
[Ổ Truyện] Add source (#2012) 9 months ago
Naga e7316b5cd0
[InovaScanManga] Remake parser (Close #1988) 9 months ago
Naga 33a0a68ac7
[Klz9] Fixes (close #1949, #1433, #1455) 9 months ago
Koitharu a83baf4c12
[Grouple] Handle restricted state 9 months ago
Koitharu 307f2090f6
[LibSocial] Improvements 9 months ago
Naga e73d636979
[KomikCast] Fixes (Close #1995) 9 months ago
Naga 77fb7270ce
[RimuScans] Update domain (Close #2005) 9 months ago
Koitharu 9d35f26252
Fix warnings (refactor) 9 months ago
Koitharu 4eeb879451
Fix build 9 months ago
Koitharu 06aa701ab1
Update MangaState enum 9 months ago
Draken 3c55b68beb
[EleceedTurkiye] Add source (#2006) 9 months ago
dragonx943 63f37d159a
[MimiHentai] Fix sort orders 9 months ago
dragonx943 cb74c43f71
[MimiHentai] Temporarily remove "POPULARITY_MONTH" 9 months ago
dragonx943 798b7570fe
[MimiHentai] Update favicon 9 months ago
Draken 9197b82e0d
[KuroNeko] Rename source 9 months ago
Draken ad45480439
[BatCave] Small fix for getDetails (#1994) 9 months ago
Draken 9493ff1630
[MyReadingManga] Fix tags + search 9 months ago
Draken 8ee46ad2d1
[TruyenHentai18] Small tweak + Refactor (#1992) 9 months ago
dragonx943 ba1b35522c
[TruyenHentai18] Fix attempt 1 9 months ago
dragonx943 41c37c025a
[site/vi] Update domain 9 months ago
Draken 9995657da5
[TruyenHentai18] Fix getPages 9 months ago
dragonx943 1569275cd4
[TruyenHentai18] Remove unused import + Broken 9 months ago
dragonx943 352d998680
[TruyenHentai18] Fix attempt 1 9 months ago
Draken d0b2002a1b
[KuroNeko + CuuTruyen] Small fixes (#1989) 9 months ago
Koitharu 5962efb7df
Update parsers structure (again) 9 months ago
Draken 8b18d1f0f8
[Com-X] Update domain 10 months ago
Draken 97ca214815
[DoujinDesu.click] Update domain 10 months ago
Draken 5d9b56296b
[HentaiVn.buzz + YuriGarden] Fixes (#1968)
* [HentaiVn.buzz] Fix "Nothing found" errors

* [YuriGarden] Add author search support + Small fixes
10 months ago
Batu 66f25c30fc
[Gafeland] Add source (#1963)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
10 months ago
dragonx943 fd197de86a
[LXManga] Fix CSS selector in altTitles 10 months ago
dragonx943 5f7478b33c
[LXManga] Store altTitles + Revert CSS selector 10 months ago
dragonx943 b058477b67
[LXManga] Small fix for altTitles 10 months ago
dragonx943 149d9e5a81
[LXManga] Remove not null constraint for altTitles 10 months ago
Batu 6420def95a
[site/tr] Update domain + Add Broken (#1956)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
10 months ago
fuckpdf 5dae26ce0b
[MangaTilkisi] Add source (#1940)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
10 months ago
Draken b5fddadb18
[LXManga] Small fixes for getDetails (#1955) 10 months ago
Draken 613d1cd1a1
[PhenixScans] Fix API path 10 months ago
fuckpdf a6a72064bb
[TenshiManga] Add source (#1933)
* [UzayManga + ElderManga] Update order

* [TenshiManga] Add source

---------

Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
10 months ago
dragonx943 73c0a4f606
[YuriGarden] Revert scanlator for getDetails 10 months ago
Draken 1d2ee20d17
[YuriGarden] Fixes (#1934) 10 months ago
dragonx943 67b556c2eb
[YuriGarden] Small fixes 10 months ago
Draken 3962c87e1d
[YuriGarden] Fix line 10 months ago
dragonx943 e6ca455254
[YuriGarden] Fix order / path + Remove rating 10 months ago
Naga d481ff416f
[Flamecomics] Fix search and genre filtering (#1923) 10 months ago
Draken b0236e492d
[YuriGarden] Add source (#1924) 10 months ago
dragonx943 497321bfa8
[KuroNeko] Add author search support 10 months ago
Draken eff26f6303
[CBHentai] Add author search + Fix duplicate tags (#1918) 10 months ago
Crono b5bd5b8cdf
[Manhastro + LuraToon] Update domain (#1915) 10 months ago
Draken 4cc15daba3
[Koharu] Add Broken 10 months ago
Koitharu 248d51321a
[Koharu] Refactor UserAgent 10 months ago
Koitharu 7cb9a264de
[LibSocial] Allow to change domain 10 months ago
Draken b58e9ff328
[TopTruyen] Update domain 10 months ago
dragonx943 958e0bed5c
[site/vi] Update domain 10 months ago
ViAnh e8d4143d72
[Koharu] Fix getPages (#1914)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
10 months ago
Naga 079b2346f1
[Webtoon] Fixes (#1912) 10 months ago
Iwakurra 553615b3ff
[HentaiLib] Update domain + allow user to set custom (#1908) 10 months ago
Koitharu f8992457dc
Update domains 10 months ago
Draken 06a9209b9e
[DexHentai] Fix getListPage (close #1901) 10 months ago
Draken 4c0b873013
[Hentalk] Update domain 10 months ago
Koitharu dba7f46f73
[Remanga] Update domain (#1891) 10 months ago
Koitharu 8665186426
[LibSocial] Fix chapters branches 10 months ago
Koitharu b5c6a94311
[ExHentai] Fix pagination 10 months ago
Koitharu 1540021f6f
[Remanga] Fix parsing manga without covers 10 months ago
Koitharu 0e1247476e
[LibSocial] Fix tags in details 10 months ago
SrCrono bf83909349
[FlowerManga] Update domain (#1890) 10 months ago
Draken e398a01f14
[AgsComics] Fix cover images
Close #1886
10 months ago
Draken 0e77938ade
[NewTruyen] Fix getPages
Solved #1604
10 months ago
dragonx943 a4a9a1cf67
[NewTruyen] Fix getPages 10 months ago
Draken 3f9544fdb8
[MimiHentai] Small fix for exclude tags 10 months ago
dragonx943 bd30fc35a1
[WestManga] Add broken 11 months ago
Draken b6f82cc282
Merge pull request #1884 from dragonx943/dev
[1stKissManga] Update domain + Small fix
11 months ago
dragonx943 9222079e98
[1stKissManga] Update domain + Small fix 11 months ago
Draken 49158b99d5
[CManga] Fix logic 11 months ago
Draken 3fe61a09b7
[CManga] Improvement getPages 11 months ago
Draken b82ad1356e
[CManga] Fix chap number (#1880) 11 months ago
dragonx943 b825d06b22
[CManga] Fix chap number 11 months ago
Draken 28f5856f9e
[Desu] Update domain 11 months ago
Draken 19751674ee
[TruyenHentai18] Add broken 11 months ago
Draken a78afba15e
[KuroNeko + TruyenHentai18] Fixes (#1879)
Solved #1683 #1604
11 months ago
Draken 0e946cf84c
[site/vi] Update sources domain (#1874) 11 months ago
Draken 6cd29603ac
[CBHentai] Fix duplicated tags (#1873) 11 months ago
Draken d94cf53624
[MimiHentai] Add exclude tags support (#1872) 11 months ago
Draken c81628c027
[YuriGarden] Removal (#1866, close #1863) 11 months ago
Koitharu bcde8ef2a2
Fix build 11 months ago
Koitharu 66df6caa67
[ExHentai] Improvements 11 months ago
Draken 08e53b2f5f
[MimiHentai] Small fix 11 months ago
Draken 7c83730ad0
[MimiHentai] Fix getListPage 11 months ago
dragonx943 d2a026c054
[MimiHentai] Remove state 11 months ago
dragonx943 faf8f6ffd7
[MimiHentai] Fix getListPage 11 months ago
Draken 6c372548db
[site/vi] Update sources domain (#1861) 11 months ago
Draken 0029697350
Fix line 11 months ago
dragonx943 bdcf27b955
[site/vi] Update sources domain 11 months ago
Draken bb7ac26c0c
[MimiHentai] Small fixes (#1860) 11 months ago
Draken 7b42317996
[MimiHentai] Add Broken 11 months ago
Draken 103b0e7eb6
[ZinChanManga] Update domain
Solved https://github.com/KotatsuApp/kotatsu-parsers/issues/1835
11 months ago
Draken fa80598e7c
[DamCoNuong] Update domain
Solved a task in #1604
11 months ago
Draken 3cb5367028
[madtheme/en + wpcomics/vi] Small fixes (#1830)
* [WpComics] Split NetTruyenX + Small improvements

* [MangaBuddy] Add subdomain

* [MangaPuma] Add subdomain

* [MangaForest] Add subdomain

* [MangaCute] Add subdomain

* [Manhuagui] Change UA to `CHROME_DESKTOP`

* [Mangaxyz] Add subdomain
11 months ago
Zhifan Ye 9417d1f973
[Manhuagui] Fix UA and parse errors (#1834) 11 months ago
mozzaru 173d30737b
[MgKomik] Update domain (#1828) 11 months ago
Draken 0db45877b7
[SolarMTL] Add source (#1826) 11 months ago
Draken e7fac812a7
[SnowMTL] Add source (#1823)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
11 months ago
Draken ab26e6b3d0
[TrueManga] Update domain 11 months ago
Draken baafc29967
[MimiHentai] Small fix 11 months ago
Draken b0bd3e99a4
[MimiHentai] Small changes (#1818)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
11 months ago
Draken 6bf377a654
[NhatTruyenVN] Fix list chaps (#1816)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
11 months ago
Zhifan Ye 3d78b9003d
[Manhuagui] Add source (#1812)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
11 months ago
Draken f57722ec54
[Com-X] Small changes (#1814)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
11 months ago
Koitharu 9558a34b00
[Koharu] Fixes 11 months ago
Draken 522e3af41d
[Koharu] Fixes (#1811)
* [Koharu] Debug attempt 1

* [Koharu] Debug attempt 2

* [Koharu] Debug attempt 3

* [Koharu] Fix chapter url

---------

Co-authored-by: Draken <dragonx943@users.noreply.github.com>
11 months ago
Koitharu 7b1a0b8d0d
[LibSocial] Authorization support 11 months ago
Koitharu 5856681753
[LibSocial] Fixes 11 months ago
Draken f066be47fe
[site/vi] Update domain + Small fixes (#1804)
* [VioletScans] Move to "en"

* [site/vi] Update sources domain

* [Madara] Split and allow overwrite for "postData"

--------------------------

Co-authored-by: Draken <dragonx943@users.noreply.github.com>
11 months ago
Draken 884142a15e
[UlasComic] Update domain + Fix getPages (#1803) 11 months ago
Tronik568 d7267b20fd
[MaidScan] Update domain (#1802) 11 months ago
Talkc0n 0537cdd533
[SayHentai] Update domain (#1801) 11 months ago
Draken c5f939df70
[Com-X] Small fixes: Bypass age verification (#1800) 11 months ago
Darwin 6854164e5a
[VioletScans] Add source (#1798)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
11 months ago
Draken 1e942c650b
[HiperDex] Update domain + Remove Broken
Solved #1766
11 months ago
Naga 30a06b67d8
[Hitomi.La] Fix cover images (#1795) 11 months ago
Naga 34a1dabae4
[MagusToon] Move to iken (#1794)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
11 months ago
Draken 23227a0427
[HoloEarth] Fix getDetails + Add locale (#1792)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
11 months ago
Draken 02d1aef77c
[Multporn] Add source (#1791)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
11 months ago
Darwin 39ebdacef9
[HoloEarth] Add source (#1788)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
11 months ago
Draken 276349e23c
[YuriGarden] Replace SAFE with SUGGESTIVE 12 months ago
kerimmkirac 0507ed8d59
[DemonicScans] Add source (#1785)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
12 months ago
Nam Huynh 2f8a0fe775
[HentaiNexus + HentaiRead] Small fixes (#1784)
* [HentaiNexus]
+ Fix package (due to moving)
+ Fix getPages() getPageUrl() (add flag for data reloading)
+ Fix getDetails (tag with quotes)
+ Cleaning

* [HentaiRead]
+ Fix package (due to moving)
+ Fix getListPage() (author searching)
+ Re-write getDetails() (update source)
+ Cleaning
12 months ago
Draken aab7e9360a
[site/vi] Update sources domain (#1780)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
12 months ago
Draken 9db443f7f7
[HangTruyen + GocTruyenTranh] Fix getPages (#1778) 12 months ago
Nam Huynh 70ce9eb008
[HentaiNexus + HentaiRead] Small fixes (#1776)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
12 months ago
Draken a58947376c
[Misskon] Add source (#1772) 12 months ago
Draken cbb70f94dd
[Manhwa210] Add source (#1771) 12 months ago
Koitharu 9408b9ba1b
Update MangaLoaderContext 12 months ago
Koitharu 409828d22f
Update MangaLoaderContext 12 months ago
Nam Huynh c9d32a804c
[Nhentai World + MeHentaiVN] Fixes (#1769)
* [HentaiRead] Fix lines

* [MeHentaiVN] Soft re-write due to source changed (#1604)

* [NHentaiWorld] Fixes fetchTags (due to source changed) and pass MangaParserTest:link (#1604)

* [MeHentaiVN] Small changes

* [MeHentaiVN] Update domain

---------

Solve task #1604

Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
12 months ago
Draken a65cbf4981
[ReaperComics] Add Broken 12 months ago
mozzaru e325063260
[Apkomik] Add Source (#1765) 12 months ago
Koitharu cf0177364c
Add requestDataFromBrowser function to MangaLoaderContext 12 months ago
Draken 8eaad2270b
[Hitomi.La] Small fix for red covers 12 months ago
Nam Huynh 362f9c6786
[HentaiRead] Add Source! (KotatsuApp#1745) (#1762)
Solved https://github.com/KotatsuApp/kotatsu-parsers/issues/1745

---------

Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
12 months ago
Draken 3047949b22
[ru/grouple] Small fixes (#1761)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
12 months ago
Draken 8d7c81c8ef
Merge branch 'master' into master 12 months ago
Nam Huynh 7d6e12d1fc
[HentaiNexus] Add source (#1757)
Solved #1690

---------

Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
12 months ago
spozer 7a0f1af06a
[ReadOnePiece] Add source (#1758)
Solved https://github.com/KotatsuApp/kotatsu-parsers/issues/1040

---------

Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
12 months ago
Draken 21aaa23586
[MimiHentai] Remove deprecated value 12 months ago
Draken 95fcbd44fd [Kiutaku] Debug attempt 1 12 months ago
Draken cc98931147
[Kiutaku + Xiutaku] Add sources (#1755)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
12 months ago
Draken 8f61beadb2
[Hitomi.la] Small fixes (#1754)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
12 months ago
Draken 6d404c6997
[RoliaScan + ElderManga] Add sources (#1753) 12 months ago
kerimmkirac 27060cfef0
[RoliaScan + ElderManga] Add sources (#1753)
* [RoliaScan] Add source

* [ElderManga] Add source

* [UzayManga] Fix search

---------

Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
12 months ago
Draken ca3d8f555a
[BatCave] Remove referer 12 months ago
Draken 7ea59a7bf1
[HeanCms] Small improvements (#1751)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
12 months ago
Draken cea4db9619
[BatCave] Small fixes (#1750)
Solved #1749
12 months ago
Draken e7c3168099
[Readmanga] Add referer (#1748) 12 months ago
Draken b165a0d611
Merge pull request #1744 from dragonx943/debug
[TeamXNovel] Small fixes
1 year ago
Draken 43e6c7c86f [TeamXNovel] Small fixes 1 year ago
Draken 346828402b
[TeamXNovel] Revert: Fix getPages 1 year ago
Draken 01d67eca2e
[site/vi] Update sources domain (#1741)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
1 year ago
Draken 4445b666aa
[site/iken] Small fixes (#1740)
* Vortexscans (Broken): Need to fix list chaps + getPages function

* Temporary fix for #1737, reference: keiyoushi/extensions-source#8645
1 year ago
Draken adf0b4e397
[DayComics] Fix list chaps 1 year ago
Draken e797cabd88
[HotComics] Fix list chaps (#1739)
Solved https://github.com/KotatsuApp/kotatsu-parsers/issues/1717, reference: https://github.com/keiyoushi/extensions-source/pull/5405
1 year ago
Draken 0c32b56378
[NetTruyen1975] Add source (#1736)
* [UzayManga] Fix tags (Attempt 1)

* [NetTruyen1975] Add source

* [NetTruyen*] Fix getPages

---------

Co-authored-by: Draken <dragonx943@users.noreply.github.com>
1 year ago
kerimmkirac 5c9c2c3a7a
[BookManga] Add Source (#1731) 1 year ago
kerimmkirac 7cd7180adb
[ArmoniScans] Add Source (#1729)
Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
1 year ago
kerimmkirac 4b6b585801
[MajorScans] Rename + Update domain (#1725) 1 year ago
Draken 1cf2153df0
[UzayManga] Fixes (#1726) 1 year ago
Draken 6ed104aeca
[AlucardScans] Add source (#1722) 1 year ago
kerimmkirac 9ba1629065
[AlucardScans] Add source (#1722) 1 year ago
kerimmkirac 669709dfbb
[site/tr + en] Add new sources (#1721)
* Add UzayManga, Mangacix, AsuraScansTR, TrMangaOku, MangaKoleji, HiveComic

* [Mangacix] Clean url

* [HiveComic] Add Broken

* [UzayManga] Clean up

* Update summary.yaml

---------

Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
1 year ago
kerimmkirac b5cb310aab
[RagnarScans] Add source (#1715)
* Add files via upload

* Update summary.yaml

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Update Ragnarscans.kt

* Remove old parser

* [Ragnarscans] Add source

---------

Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
1 year ago
Draken 50771771a8
[TeamXNovel] Fix getPages
Reference: https://github.com/keiyoushi/extensions-source/pull/8624
1 year ago
Draken dce24e2005
[MimiHentai] Small change 1 year ago
Draken d3a61f6556
[MimiHentai] Fix chaps order (#1711)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
1 year ago
Dika Ardianta 8cdd7effe5
[site/id] Update sources domain + Rename (#1705)
- Update sources domain
- Remove dead sources
- Remove duplicate site
- Rename title ManhwaList.in to ManhwaList
- Rename title KomikMirror to KomikLokal

* restore removed files with @Broken marker

Related to commit 60a085843d
1 year ago
kerimmkirac e389b01d35
[MangaGezgini] Update domain (#1710) 1 year ago
Draken 2c9632e3e5
[site/vi] Update sources domain (#1709) 1 year ago
Draken 2fff92b023
[HangTruyen] Add source (#1708)
Solved task #1694
1 year ago
Draken c949b95dab
[YuriGarden] Add source (#1707)
Solved task #1704
1 year ago
Draken da145c7f41
[MangaDistrict] Small change 1 year ago
bivaly 16b8bf9328
[FlameComics] Fixing coverurl (#1697) 1 year ago
Koitharu f076c8095a
Misc parsers fixes 1 year ago
Koitharu d2b2578a3a
[Shinigami] Fixes 1 year ago
Koitharu b24741678c
[Koharu] Fixes 1 year ago
dragonx943 fecb1db3be [Shinigami] Fix search with tags + Refactor 1 year ago
Draken 6418422157
[site/vi] Update sources domain 1 year ago
Draken 7718ff28e7
[Shinigami] Remake parser
* Solved task #1676

* Solved #1654 #1638 #1563 #1535 #1402 #1380 #1306 #1289 #1089
1 year ago
devi 3771b8d26b Fix PhenixScans close #1599
close #1675
Fix KeyoappParser close #1640
1 year ago
Draken c1357b46fb
[NetTruyen] Fix list chaps (#1678) 1 year ago
Draken 6c9f6eaae5
[Shinigami] Add Broken 1 year ago
Draken 309b6405f9
[Koharu] Add source (#1667)
[Koharu] Add source (Broken)
1 year ago
Draken 9058ead462
[Com-X] Add source (#1673) 1 year ago
Draken 162436c913
[BatCave] Fix search (#1672) 1 year ago
Draken 876f3a3d3a
[BatCave] Move to Comic sources 1 year ago
Draken 02acc7e9d4
[CuuTruyen] Add new SortOrder (#1669) 1 year ago
Koitharu e874837efb
Improve some parsers 1 year ago
Draken c294f5bb61
[Hentalk] Small fixes 1 year ago
Draken 826c948260
[Hentalk] Add source (#1665) 1 year ago
Draken a5c70f0b51
[WaManga] Set reverse for list chaps 1 year ago
Draken c2f9c18e29
[YuriNeko] Add Broken 1 year ago
Draken 91cd239c97
[BatCave] Add source (#1658) 1 year ago
Draken 4d255d0d53
[MimiHentai] Add source (#1651)
* [MimiHentai] Add source

* [MimiHentai] Fixes

* [MimiHentai] Clean code

---------

Co-authored-by: Draken <dragonx943@users.noreply.github.com>
1 year ago
Draken d5a1e1a52f
[site/vi] Update sources domain (#1653)
Co-authored-by: Draken <dragonx943@users.noreply.github.com>
1 year ago
Koitharu 9abc80880f
[Desu] Update domain 1 year ago
Draken 8d91adbd6e
[Vcomycs] Update domain - #1644 1 year ago
kerimmkirac 20a24db949
Fixing url of Turkish extensions (#1642)
* Update MangaGezgini.kt

* Update MangaTr.kt

* Update Timenaight.kt

* Update Mangacim.kt

* Update MilaSub.kt

* Update Grimelek.kt

* Update Grimelek.kt

---------

Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
1 year ago
Koitharu b837659408
[Desu] Fix domains order 1 year ago
MakaMonstir 696e0bc8f6
[Desu] Update domain (#1637)
* Update domain list

* [Desu] Update domain

---------

Co-authored-by: Draken <131387159+dragonx943@users.noreply.github.com>
1 year ago
palaks-1 2ec2484982
[NHentai.xxx] Fix source (#1628) 1 year ago
Draken 738e2649d6
[DocTruyen5s] Small fixes 1 year ago
Draken 8e9da4e39f
[Hitomi.La] Change cdn domain & fix image url logic (#1624)
* [Hitomi.La] Change cdn domain & fix image url logic

* [Hitomi.La] Set null for preview in getPages

* [Hitomi.La] Remove unused function

* [Hitomi.La] Use webp for preview images

* [Hitomi.La] Fix build
1 year ago
Draken da3a9a7d9b
[Phenixscans] Mark as Broken 1 year ago
Draken 8f9c0d93f6
[DocTruyen3Q] Fix chapter images + Improvements (#1621) 1 year ago
Draken 8bb0c4f4f1
[site/vi] Update sources domain 1 year ago
Koitharu dbb04d2051
Fix link resolver 1 year ago
Draken 84562d7b36
[DoujinDesu.click] Update domain
Solved #1590
1 year ago
Draken 29327dafd0
[DamCoNuong] Update domain + Fix images
* [DamCoNuong] Update domain + Fix images
* [DamCoNuong] Fix domain
1 year ago
Draken c0d12b0c83
[CuuTruyen] Fixes + Tweaks
* [Cứu Truyện] Fix chap scanlator

* [Cứu Truyện] Fix search with state + Tags
1 year ago
Koitharu a0041f3c93
Fix build 1 year ago
Koitharu 2175e91db7
[DamCoNuong] Fixes 1 year ago
Draken 5fa7590550
[TopTruyen] Update domain 1 year ago
Draken f3ae64ed1f
[HentaiZ] Update domain 1 year ago
Koitharu cc437b8cd4
[ExHentai] Pages preview support 1 year ago
Draken db0782c61a
Merge pull request #1591 - dragonx943/damconuong/fixes
[DamCoNuong] Fixes
1 year ago
Draken bbf059ff35
[DamCoNuong] Fixes 1 year ago
Draken bebc615376
Merge pull request #1589 - dragonx943/sources/damconuong
[DamCoNuong] Add source
1 year ago
Draken 77616a5404 [DamCoNuong] Add source 1 year ago
Draken 45584f2eea
Merge pull request #1588 - dragonx943/fixes
[TopTruyen] Fix chapter images
1 year ago
dragonx943 1c3e59f9f0 [TopTruyen] Fix chapter images 1 year ago
Draken a88de5b1b6
[TopTruyen] Small changes 1 year ago
Draken 91977ba025
[MyReadingManga] Small changes 1 year ago
Draken 5420290564
[CuuTruyen] Add some tags 1 year ago
Draken 98c21db8da
Merge pull request #1577 - dragonx943/sources/domain
[vi/site] Update sources domain
1 year ago
dragonx943 23953cae11 [vi/site] Update domain 1 year ago
Draken 9cd310ba51
Merge pull request #1576 - dragonx943/sources/doctruyen3q
[TopTruyen + DocTruyen3Q] Fix chapter images
1 year ago
dragonx943 51311a17fb [TopTruyen] Fix chapter images 1 year ago
dragonx943 6799d79754 [DocTruyen3Q] Fix chapter images 1 year ago
Draken e429cdd811
[WeebCentral] Fix title not found 1 year ago
Draken 2e5ce98bac
Merge pull request #1575 - dragonx943/sources/myreadingmanga
[MyReadingManga] Add source
1 year ago
Draken 68c3a7148f [MyReadingManga] Add source 1 year ago
Draken 2a1c5bc6da
[Manga18] Fix title not found 1 year ago
Draken 6fd18543ef
[Manga18] Fix title not found 1 year ago
Koitharu ae76631aaf
[HeanCms] Fix manga ids 1 year ago
Draken ca59e8e577
Merge pull request #1571 - dragonx943/sources/cmanga
[CManga] Small fixes
1 year ago
Draken 3648b36d16
[CManga] Small fixes 1 year ago
Draken 439b7862a3
Merge pull request #1570 - dragonx943/sources/doctruyen5s
[DocTruyen5s] Fixes
1 year ago
dragonx943 f4b975c865 [DocTruyen5s] Fixes 1 year ago
dragonx943 da02a82d74 [DocTruyen5s] Fixes 1 year ago
bivaly 0d5498e60d Update FlameComics.kt 1 year ago
Koitharu 0847baf17b
Fix http request processing 1 year ago
Koitharu adf5794ad3
Expose authorization provider 1 year ago
Draken cfeea09a05
Merge pull request #1567 - dragonx943/sources/domain
[site/vi] Update domain + Tweak
1 year ago
dragonx943 e9bcb1abed [site/vi] Update domain + Tweak 1 year ago
Draken 35cf827c90
Merge pull request #1566 - dragonx943/sources/nhentaiworld
[Nhentai World] Fix tags
1 year ago
dragonx943 a10c34115b [Nhentai World] Fix tags 1 year ago
Draken 2f4e93e391
Merge pull request #1565 - dragonx943/sources/nhentaiworld
[Nhentai World] Fix chapters list
1 year ago
dragonx943 6ed42bc28b [Nhentai World] Fix chapters list 1 year ago
dragonx943 dc7c859344 [Nhentai World] Fix chapters list 1 year ago
Koitharu a82e41aabb
Deprecate Manga.isNsfw 1 year ago
Draken 3c62f4280f
Merge pull request #1557 from dragonx943/kuma/tweak
[Kumapage] Fixes + Tweak
1 year ago
Draken f806cdd118 [Kumapage] Fixes + Tweak 1 year ago
Draken 224c8a6c53 [Kumapage] Fixes + Tweak 1 year ago
Draken 408be7fa3c
Merge pull request #1555 - dragonx943/sources/kuma
[Kumapage] Add source
1 year ago
Draken a02b001768 [Kumapage] Add source + TODO 1 year ago
Draken 5edbdf801a [Kumapage] Fixes 1 year ago
Draken cc3701250a [Kumapage] Fixes 1 year ago
Draken e94d434f92 [Kumapage] Fixes 1 year ago
Draken 4d3adaa143 [Kumapage] Add source 1 year ago
Draken 36eecf781f
Merge pull request #1552 - palaks-1/source/fixes
[NHentai.xxx] Fixes
1 year ago
palaks-1 76f44c98be [NHentai.xxx] Fix source 1 year ago
Draken d660d9a30f
Merge pull request #1551 - dragonx943/sources/domain
[site/vi] Update sources domain
1 year ago
Draken 8326ee8c29 [site/vi] Update domain 1 year ago
Draken 690a004de6
Merge pull request #1550 - dragonx943/sources/dragon
[Dragon Translation] Remake parser
1 year ago
Draken 98c6454eba [Dragon Translation] Fix pages img 1 year ago
Draken 8e42ef1229 [Dragon Translation] Remake attempt 1 1 year ago
Draken 02359a2630
Fix build 1 year ago
Draken 158f6f4232
[Dragon Translation] Fix build 1 year ago
Draken 77de194b83
[Dragon Translation] Small changes 1 year ago
Draken 85cd13c8b2
Merge pull request #1549 - dragonx943/fixes
[MeHentaiVN] Fix "403 Access Denied" errors
1 year ago
Draken bfbd01b1c2
[MeHentaiVN] Fixes 1 year ago
Koitharu e83636edc0
[NhentaiWorld] Fixes 1 year ago
Koitharu 63dc67b6fd
[NhentaiWorld] Draft 1 year ago
Koitharu b0de4c3b66
[ComicK] Improvements 1 year ago
Draken d5a4cf68c6
[NewTruyen] Fix dateText + parse function 1 year ago
Draken 288d6e0b8f
Merge pull request #1543 - dragonx943/fixes
[NewTruyen] Small fixes
1 year ago
Draken aa98db9ac6 [NewTruyen] Fixes 1 year ago
Draken c19e1fd5e5
[Manganato] Update domain 1 year ago
Draken ef10e3cafc
[MangaNelo.com] Update domain 1 year ago
Draken 400c423dde
[Mangakakalot.gg] Update domain 1 year ago
Draken 85ba4fa0de
[Mangakakalot.gg] Update domain 1 year ago
Draken 8cb913d538
[TruyenHentai18] Update domain 1 year ago
Draken 2128bbe1b6
[TruyenHentai18] Fixes 1 year ago
Draken 4df07a50fb
Merge pull request #1538 - dragonx943/sources/truyenhentai18
[TruyenHentai18] Add source
1 year ago
Draken 8fa63bca64 [TruyenHentai18] Add source 1 year ago
Draken e191e0c9c1
Merge pull request #1529 - dragonx943/fixes
[Madara] Fixes + Update sources domain [vi]
1 year ago
Draken 5970509886 [site/vi] Update sources domain 1 year ago
Draken b2c7d1028c [Madara] Fixes 1 year ago
Draken 377606d1aa [CBHentai + HentaiZ] Fixes 1 year ago
Draken 688bfdaa82
Merge pull request #1528 - dragonx943/sources/mangapill
[MangaPill] Add source
1 year ago
Draken bc60629e66
[MangaPill] Add source 1 year ago
Draken f3372751d1
[MangaPill] Add source 1 year ago
Draken 77a5216ebf
Merge pull request #1524 - shub39/mangakakalot_fix
[Mangakakalot.com] -> [Mangakakalot.gg] Fixes
1 year ago
shub39 7ced91beea 🔧 no description 1 year ago
shub39 41f5cee6c6 New way to get tags 1 year ago
shub39 ba5086c409 🔨 Mangakakalot fix
* issue #1522
* passing 5 out of 11 tests
1 year ago
Koitharu 531145c7f9
Change chapter name to title 1 year ago
Koitharu f26fecb714
Small fixes 1 year ago
Draken dd7568659f
Fix warn 1 year ago
Koitharu 843d1f1bea
Fixes 1 year ago
Koitharu 29263ff59b
Add missing changes from #1496 1 year ago
Koitharu 3b91a3883e
[Grouple] Fixes 1 year ago
Koitharu ddb9b13df7
Some refactor and fix warnings 1 year ago
Koitharu ed578e5bff
Migrate altTitle to Set<String> 1 year ago
Koitharu cdbb004ca1
Merge branch 'feature/search_query' 1 year ago
palaks-1 f6145bc412 Remove deprecated Manga constructor and introduce authors field 1 year ago
Draken 86e7c21e4d
Merge pull request #1514 - VietAnh14/source/buondua
[BuonDua] Some fixes
1 year ago
ViAnh 76638c6aab [BuonDua] Some fixes 1 year ago
Draken 3a950f401b
[BuonDua] Update domain 1 year ago
Draken eeba47ea03
Merge pull request #1513 - VietAnh14/source/buondua
[BuonDua] New source
1 year ago
ViAnh 762f5cbec2 [BuonDua] New source 1 year ago
Draken 5b6c9be6d6
[CBHentai] Small fixes 1 year ago
Koitharu f681ed270b
Update parsers structure 1 year ago
Koitharu a0168a7d49
Improvements 1 year ago
Draken 47263e641e Update Mangaoku.kt 1 year ago
Draken 70c78f1cab Update MangaKazani.kt 1 year ago
Draken e7cb83f7b1 Update MangaSiginagi.kt 1 year ago
Draken 0212e1e18f Update PrunusScans.kt 1 year ago
Draken 265c411df2 Update RobinManga.kt 1 year ago
Draken 69a14b6fc5 Update YaoiTr.kt 1 year ago
Draken 168446e6d5 Update YaoiTr.kt 1 year ago
Draken 23af28f4dd Update NabiScans.kt 1 year ago
Draken ac80384ba1 Update Mangaoku.kt 1 year ago
Draken c4be0b58ca Update ImparatorManga.kt 1 year ago
Draken 023d13e44b Update EpikMan.kt 1 year ago
bai f4909ade09 readded 1 year ago
bai 26fb2b9dcb new domain 1 year ago
bai 44c2ebd4a5 closed 1 year ago
bai 47874032a3 new domain 1 year ago
bai 6dfa5ad1c2 closed 1 year ago
bai 111ad58b57 closed 1 year ago
bai f1086c93f7 change 1 year ago
bai 5e27a59414 change domain 1 year ago
bai cece556669 change domain 1 year ago
bai 9252da8004 closed 1 year ago
bai a9fdfee7ad mangagezgini new domain 1 year ago
bai 6904aecea3 they are closed 1 year ago
bai b516632731 milasub new domain 1 year ago
bai 87ad5d0c3a nabiscans closed 1 year ago
bai b95bb46d00 nyx mange closed 1 year ago
bai 60c91f9cc4 prunuscans closed 1 year ago
bai 20ce9cb8d9 robinmanga closed 1 year ago
bai a2625410f9 rüyamange new domain 1 year ago
bai e2697012ea viyafansub is closed 1 year ago
bai f28cd3aad9 webtoonhatti new domain 1 year ago
bai fd82a54d63 change to new domain 1 year ago
bai d95d26a280 change site name 1 year ago
bai 33310bdc42 yaoi tr closed 1 year ago
bai 6281d770c1 zamanmanga is closed 1 year ago
Batu c37f3bdad7 Delete src/main/kotlin/org/koitharu/kotatsu/parsers/site/madara/tr/AlliedFansub.kt
allied fansub is closed
1 year ago
Draken 5630ca5982
Merge pull request #1507 - dragonx943/sources/pzt
[PointZero Toons] Add source
1 year ago
Draken ceb75d98d0
[PointZero Toons] Add source
[PointZero Toons] Add source
1 year ago
Draken 75ad3b5657
[PointZero Toons] Add source
[PointZero Toons] Add source
1 year ago
Draken 5adf27eab1
Update SssScanlator.kt 1 year ago
Draken 18fbbed723
Merge pull request #1501 - palaks-1/source/nhentai_xxx_fix
[NHentai.xxx] Fix source
1 year ago
Draken 201bbf755d
Merge pull request #1502 - AwkwardPeak7/weeb2
weebcentral: fix manga list & link resolving
1 year ago
AwkwardPeak7 b9fe35b8c3
weebcentral: fix manga list & link resolving 1 year ago
palaks-1 fd0787152e [NHentai.xxx] Fix source 1 year ago
Koitharu 29cf04c804
MangaParser interface 1 year ago
Koitharu a4827d1b7d
Merge branch 'palaks-1-new-search-query-proposal' into feature/search_query 1 year ago
Koitharu f107e11528
Refactoring 1 year ago
Draken 52b6f75dd0
Merge pull request #1495 - dragonx943/sources/HentaiZ
[HentaiZ] Add source
1 year ago
Draken fd719221ed
[HentaiZ] Add source 1 year ago
Draken a2c04fa954
[HentaiZ] Add source 1 year ago
Draken 6db5ac5f26
[HentaiVn.buzz] Small fixes 1 year ago
Draken cedce23cf5
Merge pull request #1494 - sources/domain
- [TruyenQQ] Refactor
- [TruyenGG] Refactor
- Update some sources domain [site/vi]
1 year ago
Draken 645816f614 [site/vi] Update sources domain 1 year ago
Draken 00a769bfd5
Merge pull request #1492 - dragonx943/sources/HentaiVn
[HentaiVn.buzz] Add source
1 year ago
Draken 0fe5133992
Merge pull request #1493 - dragonx943/sources/SssScanlator
[sources/SssScanlator] Small fixes
1 year ago
Draken 2994f5936e
Merge pull request #1491 - dragonx943/sources/cbhentai
[CBHentai] Fix tags
1 year ago
Draken e8a96fa0ad
[SssScanlator] Small fixes
Fix chapters > numChap [SssScanlator]
1 year ago
Draken 8c8cf5dcd9 [SssScanlator] Fix attempt 1 1 year ago
Draken 19ff5db70d
[HentaiVn.buzz] Add source 1 year ago
Draken c7b689842e
[HentaiVn.buzz] Add source 1 year ago
Draken df03c65d5a
[CBHentai] Small fixes 1 year ago
Draken 4a88a93e8d
[CBHentai] Fix tags
Fix "Root not found" / "Content not found or removed" error [CBHentai]
1 year ago
Draken 950b9e5510
[Vcomycs] Update source domain
Solved #1489
1 year ago
Draken 70230de8e1
[vi/KuroNeko] Fix source name
.../site/vi/viHentai.kt -> KuroNeko.kt [Việt Hentai]
1 year ago
palaks-1 c0ea9cadd7 New MangaSearchQuery implementation proposal 1 year ago
Draken 8c966c3e23
Merge pull request #1478 - VietAnh14/fixes
[Cmanga] Fixes
1 year ago
ViAnh 9f87e77ede [Cmanga] Fixes 1 year ago
ViAnh 6165c9b7cf [Cmanga] Update api 1 year ago
Koitharu 88ea5215c0
[MangaDex] Search by author 1 year ago
Koitharu 522b3d53cc
Artist filter support 1 year ago
Draken 9a1d37166a
[CManga] Small fixes
[CManga] Fix "Nothing found" bug #1477
1 year ago
Draken 07ae87f72b
[Kiryuu] Update domain
[Kiryuu] Fix looping captcha
1 year ago
Draken a16a202800
Merge PR #1475 - dragonx943/sources/viHentai
[viHentai] Add source
1 year ago
Draken 09ff9c0a7d [viHentai] Fix attempt 2 1 year ago
Draken 8c4a93d1dc [viHentai] Fix attempt 1 1 year ago
Draken 2da8a4af84 [viHentai] Add source 1 year ago
Draken 7d5f0d3187
Merge pull request #1464 - scryptan/wamanga-fix
[WaManga] Fixes
1 year ago
Draken a65c1810a2
Merge pull request #1465 - palaks-1/sources/nhentai_xxx
[NHentai.xxx] Fix source
1 year ago
palaks-1 09b7d27b40 [NHentai.xxx] Fix source 1 year ago
scryptan e1175d01cc use api, instead of html parse 1 year ago
Draken 51fe77ff33
Merge pull request #1461 - dragonx943/site/vi/newtruyen
[NewTruyen] Add source + Update sources domain [vi]
1 year ago
Draken c540266e48 [NewTruyen] Add source 1 year ago
Draken 198e859850
[HamTruyen] Small fixes
[HamTruyen] Fix chapters order + Number
1 year ago
Draken 87944719a5
Merge pull request #1457 - palaks-1/sources/nhentai_xxx
[NHentai.xxx] Add source
1 year ago
palaks-1 6b44656819 [NHentai.xxx] Add source 1 year ago
Draken 35334e82ae
Merge pull request #1456 - palaks-1/sources/nhentai_to
[NHentai.to] Add source
1 year ago
palaks-1 32e7b7184f [NHentai.to] Add source 1 year ago
Draken 4098183be7
Merge pull request #1453 - dragonx943/sources/muitohentai
[MuitoHentai] Fix "Content not found or removed" errors
1 year ago
Draken e25b03b68f [MuitoHentai] Fix attempt 3 1 year ago
Draken 7076b2d443 [MuitoHentai] Fix attempt 2 1 year ago
Draken 1933200372 [MuitoHentai] Fixes 1 year ago
Draken 0359afe574
Merge pull request #1452 - dragonx943/sources/hamtruyen
[HamTruyen] Add source
1 year ago
Draken cd8282bbe0 [HamTruyen] Add source 1 year ago
Draken 8350b3c70e Merge branch 'sources/hamtruyen' of https://github.com/dragonx943/Kotatsu-parsers into sources/hamtruyen 1 year ago
Draken 8488d80c6f
Merge branch 'KotatsuApp:master' into sources/hamtruyen 1 year ago
Draken affd4646af
Merge pull request #1451 - dragonx943/fixes
[Hentai18VN, TruyenHentaiVN] Refactor + Fixes
1 year ago
Draken 9853638c6b [Hentai18VN] Refactor 1 year ago
Draken faf2f2a312 [HentaiVN18] Refactor + Fixes 1 year ago
Draken 4adf485208 Merge branch 'fixes' of https://github.com/dragonx943/Kotatsu-parsers into fixes 1 year ago
Draken eaab76aa9e [HamTruyen] Add source 1 year ago
Draken 2ca740a85d
Merge pull request #1450 - dragonx943/sources/hentai18vn
[Hentai18VN] Add source
1 year ago
Draken 569aa73cd4 [Hentai18VN] Fix attempt 3 1 year ago
Draken d3858ff5ff [Hentai18VN] Fix attempt 2 1 year ago
Draken 2760dc8a76 [Hentai18VN] Fix attempt 1 1 year ago
Draken c1990dde75 Merge branch 'sources/hentai18vn' of https://github.com/dragonx943/Kotatsu-parsers into sources/hentai18vn 1 year ago
Draken af5b62b8fd
Merge branch 'KotatsuApp:master' into sources/hentai18vn 1 year ago
Draken fcd6e92dba [Hentai18VN] Add source 1 year ago
Draken dad098ff34
Merge pull request #1449 from dragonx943/fixes
[TruyenHentaiVN] Fix tags
1 year ago
Draken aa9a499a09 [TruyenHentaiVN] Fix tags 1 year ago
Draken f10e6f021c Add base code for Hentai18VN 1 year ago
Draken 09596331fd
[TruyenHentaiVN] Fix tags 1 year ago
Draken c8f9962def
[TruyenHentaiVN] Fix tags 1 year ago
Draken e91e8fcccb Merge branch 'sources/hentai18vn' of https://github.com/dragonx943/Kotatsu-parsers into sources/hentai18vn 1 year ago
Draken a702f1f7ef
Merge pull request #1439 from scryptan/wamanga-fix
[WaManga] Fix NoSuchMethodError
1 year ago
Draken 9654a68dad
Merge pull request #1443 - VietAnh14/fixes
[CManga] Fixes
1 year ago
ViAnh a2f18e2284 [Cmanga] Remove escape chars in description 1 year ago
ViAnh fceb927bce [Cmanga] Small fixes 1 year ago
ViAnh f192c1716d [Cmanga] Filter unknown manga item 1 year ago
Draken f2960c2229
Merge pull request #1442 - dragonx943/sources/manhwaindo
[ManhwaIndo] Fix "Content not found or removed"
1 year ago
Draken 552a68c1d4 [ManhwaIndo] Fix attempt 1 1 year ago
Draken e5f277c957
Merge pull request #1441 from AwkwardPeak7/weeb
WeebCentral: some fixes
1 year ago
AwkwardPeak7 ff05dd7694
[WeebCentral] fix chapter number not correct in some cases 1 year ago
AwkwardPeak7 7c604647bf
[WeebCentral] fix tags not loading 1 year ago
Draken 0fd0c38971
Merge pull request #1440 - dragonx943/sources/truyenhentaivn
Add new source: TruyenHentaiVN
1 year ago
Draken a454613ce8 [TruyenHentaiVN] Add source 1 year ago
Draken 73dd8c4688 [TruyenHentaiVN] Add source 1 year ago
scryptan 1a07cfc144 try fix not exist optJSONObject in kotatsu app 1 year ago
Draken 715fe9e645
[Weeb Central] Add source 1 year ago
Draken 6316ac055c
Merge pull request #1438 from AwkwardPeak7/weeb
[Weeb Central] Add source
1 year ago
Draken ff64db7a7c
Merge pull request #1437 from VietAnh14/fixes
[NetTruyen] Fix fetch chapters
1 year ago
AwkwardPeak7 d5a299d7ff
mark MangaSee and MangaLife as broken 1 year ago
AwkwardPeak7 0e3c3f89d0
add Weeb Central 1 year ago
ViAnh 234e23b7a5 [NetTruyenVie] Fix fetch chapters 1 year ago
ViAnh 69093b8214 [NetTruyen] Fix fetch chapters 1 year ago
Draken 708d7908aa
[GocTruyenTranh] Fixes
Add NSFW detector by Tags [site/vi/GocTruyenTranh]
1 year ago
Draken b08051789d
[GocTruyenTranh] Fixes
Fix author + "null" in altTitle [GocTruyenTranh]
1 year ago
Draken 794a737b6d
Merge pull request #1435 - dragonx943/vi/goctruyentranh
Add new source: GocTruyenTranh
1 year ago
Draken b1ef29fadf [GocTruyenTranh] Add source 1 year ago
Draken 0475fc81ed [GocTruyenTranh] Add source 1 year ago
Koitharu 6aefb603ae
[NetTruyen] Fix chapters order 1 year ago
Draken 92bdefcf9d
Merge pull request #1432 - dragonx943/sources/wpcomics
[site/wpcomics/vi] Fixes
1 year ago
Draken 576de54ef8 [wpcomics/vi/nettruyen*] Fix fetch invalid format 1 year ago
Draken 319225d645 [wpcomics/vi/nettruyen*] Fixes 1 year ago
Draken 9addea65d6 [wpcomics/vi/nettruyen*] Fixes 1 year ago
Draken 95c43a06d5 [wpcomics/vi/nettruyen*] Fixes 1 year ago
Draken 72c7317672
Merge pull request #1429 - dragonx943:sources/nsfw
Mark some sites as NSFW sources
1 year ago
Draken 1cec3e5436 Mark some sites as NSFW sources 1 year ago
Draken 6c18056cef
[NetTruyenVie] Fix attempt 1 1 year ago
Draken f4b678083d
Merge pull request #1422 from VietAnh14/source/vivicomi
[Vcomycs] New source
1 year ago
ViAnh 50eb6f7b7e [Vcomycs] Update content type 1 year ago
ViAnh 0b6af2549f [Vcomycs] New source 1 year ago
Draken f229d95469
Merge pull request #1421 - dragonx943:site/domain
Update some sources domain
1 year ago
hoanphonglinh 7ba725de6d [vi] Update sources domain 1 year ago
Draken 677fd51e91
Update ManhuaScan.kt 1 year ago
Koitharu 45b843fadc
Replace try-catch with use{} in parse functions 1 year ago
Koitharu 9ec62f0be0
Refactor 1 year ago
Draken 9baba17692
Merge pull request #1413 from dragonx943/sources/manhwaindo
[Manhwaindo] Fix missing images
1 year ago
hoanphonglinh d86d0a5d5b [Manhwaindo] Fix attempt 2 1 year ago
hoanphonglinh 543e8e3267 [Manhwaindo] Fix attempt 1 1 year ago
Draken 267dcf8fa7
[TruyenTranh3Q] Fixes 1 year ago
Draken fec4594480
Merge pull request #1410 from dragonx943:site/vi/final
[TruyenTranh3Q] Add source
1 year ago
hoanphonglinh f669df2ff5 [TruyenTranh3Q] Add source 1 year ago
hoanphonglinh 9dfa652125 [TruyenTranh3Q] Add source 1 year ago
Draken 87f1cd8cdc
Update Komiku.kt 1 year ago
Draken f30f8ee563
Merge pull request #1408 from dragonx943:site/vi/yurineko
[YuriNeko] Fix 403 errors + Missing images
1 year ago
Draken 64eceba9fa
[YuriNeko] Fix domain 1 year ago
Draken 58e09bdaba
Merge pull request #1397 from sources/nettruyenvie
[NetTruyenVie] Add source
1 year ago
Koitharu d4e9040ccf Fixes 1 year ago
Koitharu 653894cb82 First commit from local/Kotatsu-parsers 1 year ago
Draken 581cb0c1fe First commit from local/Kotatsu-parsers 1 year ago
Koitharu 4e545ca7af
[WaManga] Refactor #1390 1 year ago
scryptan b4050e0335 refactor 1 year ago
scryptan 1cecd1fe94 add wamanga parser 1 year ago
Koitharu 51ed1b2db8
Update json utils 1 year ago
Draken 3e4d712dca
Merge pull request #1385 from dragonx943/sources/domain
Update sources domain
1 year ago
Draken d284b647ab
Merge branch 'KotatsuApp:master' into sources/domain 1 year ago
Draken 3bcd05fc5f Update sources domain 1 year ago
Koitharu b0a1cc48a6
Update utils 1 year ago
Draken 2eba38b289 Update domains 1 year ago
Koitharu 8481fadbd0
Fix absolute url resolving 1 year ago
Koitharu 6abcdd8d4b
Fix nullable strings usage 1 year ago
Koitharu 5df1445e29
Migrate models to data class and update Manga class 1 year ago
Draken a94adf4d90
[CuuTruyen] Fixes
Fix timer + Add manga status for CuuTruyen
1 year ago
dragonx943 f21aa282dd [CuuTruyen] Fix attempt 1 1 year ago
Draken 8ce6694232
Merge pull request #1336 from dragonx943/sources/vi
[sources/vi] Fix sources domain
1 year ago
Draken d8bd51e17e [site/vi] Change sources domain 1 year ago
Draken b1b04d2953
Merge pull request #1331 from dragonx943/final
Update domain + Fixes
1 year ago
dragonx943 8852020ca8 [MeHentaiVN] Add source 1 year ago
Draken deb4f763d0
Merge pull request #2 from dragonx943/test
[NetTruyen] Fixes
1 year ago
dragonx943 58593de53d [NetTruyen] Fix attempt 2 1 year ago
dragonx943 040be87a54 [NetTruyen] Fix attempt 1 1 year ago
Draken 1703ac39d5
Rename + Change domain sources from dragonx943/test
Rename + Change domain sources
1 year ago
dragonx943 839869061e Rename some sources 1 year ago
Koitharu 764c65563b
[Grouple] Fixes 1 year ago
Koitharu 2550b9cac1
[KomikTap] Fix http pages 1 year ago
Koitharu 04225170d3
[Grouple] Fix pages parsing 1 year ago
Koitharu 326a3f78b2
Fix Response.mimeType extension 1 year ago
Koitharu f86d31f811
Update utils 1 year ago
vianh 10dac6c0d4 [CManga] New source 1 year ago
devi fece09b781 Merge remote-tracking branch 'origin/master' 1 year ago
devi 6b1fb90584 Update source
[AnimeH] Broken source close #1301
 [SussyScan] fix close #1298
 close #1287
 close #1282
 close #1273
 close #1267
 close #1268
 close #1260
 close #1234
 [Manhwa18] fix src close #1297
 close #1290
 close #1288
[KomikCast] close #1284
close #1283
close #1272
close #1270
close #1265
[Birdtoon] close #1276
[Komiktap] close #1275
[SadScans] close #1274
[Flamecomics] reverse chapter fix close #1266
[Kiryuu] fix url close #1258
[MangaTime] Broken close #1228
[Dexhentai] fix close #1226
1 year ago
Koitharu 5055cfddbd
Merge pull request #1294 from dragonx943/patch-1 1 year ago
Draken 878d20e612
[ComicExtra] Remake #1 1 year ago
Koitharu 336f64712d
Merge pull request #1292 from dragonx943/patch-1 1 year ago
Draken 56989c6b2f
Update Fecomicc.kt 1 year ago
Draken 7b1d3dcd46
Fixes 1 year ago
Koitharu f1e66daf06
Merge pull request #1291 from dragonx943/test 1 year ago
dragonx943 ca087290b9 [YuriNeko] Fix attempt 1 1 year ago
Koitharu 883886bc32
Merge pull request #1281 from dragonx943/test 1 year ago
Draken 9e60253eb1
Merge branch 'KotatsuApp:master' into test 1 year ago
dragonx943 dba85f2bff Fixes 1 year ago
Koitharu e5b852793e
Merge pull request #1280 from dragonx943/test 1 year ago
Draken d4cbd86b96
Update summary.yaml 1 year ago
Draken 8d82bacaa3
Merge branch 'KotatsuApp:master' into test 1 year ago
dragonx943 4af760c172 Fixes 1 year ago
dragonx943 7747bc53b1 Update domain + Refactor 1 year ago
Koitharu 0456a92f15
[WpComics] Fix covers 1 year ago
Draken 677afa9a41
Update TopTruyen.kt (#1279) 1 year ago
Koitharu ebcce4f2ec
[DocTruyen3Q] Fix covers 1 year ago
Koitharu 91e53e4872
[AsuraScans] Fix pages parsing 1 year ago
Koitharu 8bc51b3b79
[FlameComics] Rewrite #1237 1 year ago
Koitharu 733d3ca69f
[ResetScans] Migrate to the new website (close #1252) 1 year ago
Koitharu 4a2e465e5d
[NineManga] Fixes #1253 1 year ago
Draken 7ed3c5a175
Update sources domain (#1254)
* Update Mangarbic.kt

* Update BlogTruyenParser.kt

* Update CuuTruyenParser.kt

* Update HentaiVNParser.kt

* Update LxManga.kt

* Update DocTruyen3Q.kt

* Update NetTruyenHE.kt

* Update NetTruyenLL.kt

* Update NetTruyenSSR.kt

* Update NetTruyenUU.kt

* Update NhatTruyenVN.kt

* Update TopTruyen.kt

* Update OtakusanVi.kt

* Update OtakusanEn.kt

* Update HentaiVnPlus.kt

* Update TruyenTranhDamMyy.kt

* Update TruyenVn.kt

* Update DocTruyen5s.kt
1 year ago
bivaly 8567cedf0a
Updating FlameComics ListUrl (#1255)
* Update FlameComics.kt

* Update FlameComics.kt

* Update FlameComics's listUrl

There is no longer a listUrl, but they may revert the change, so the override is left in place for easy switching back

* removing FlameComics ListUrl

There is no longer a listUrl, but they may revert the change, so the override is left in place for easy switching back
1 year ago
Koitharu e02b563bf4
[AsuraComic] Fix pages parsing 1 year ago
Draken 27d2814ef9
[LectorManga] Update domain (#1233)
* Update LectorManga.kt

* Update LikeManga.kt
1 year ago
Draken de95fa2e3e
Update domain + Add sources (#1221)
* Update HariManga.kt

* Update Mgkomik.kt

* Update XoxoComics.kt

* Create DuaLeoTruyen.kt

* Create MyComicList.kt

* Update DuaLeoTruyen.kt

* Update summary.yaml

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Add suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Fixes

---------

Co-authored-by: Koitharu <nvasya95@gmail.com>
1 year ago
Koitharu 5a4f9d8914
[MangaFire] Fix search 1 year ago
Koitharu 23271c4623
[LibSocial] Use site domains 1 year ago
Koitharu f3d14e101c
[MintManga] Update domain 1 year ago
Koitharu 275d7f5419
[ImHentai] Fix pages parsing 1 year ago
Koitharu ac163085e1
[ImHentai] Fix pages parsing 1 year ago
Draken 8b4bac3cc2
[TruyenGG] Refactor (#1215)
* Update TruyenGG.kt

* Update TruyenGG.kt
1 year ago
Koitharu 35f4db7905
Rollback kotlin to fix build 1 year ago
Koitharu 08b1241a68
Remove redundant trim and mapNotNullToSet usage 1 year ago
Koitharu 3ffcefaa1b
Get rid of redundant boxing 1 year ago
Koitharu 3b173dc6fc
Improve utils 1 year ago
Draken 60f1fb1f70
Update + Add source (#1213)
* Update YurinekoParser.kt

* Create TruyenGG.kt

* Update and rename Truyenqq.kt to TruyenQQ.kt

* Update TruyenGG.kt

* Update summary.yaml

* Update suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Update suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Update suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Update suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Update suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Update suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Fix tags

---------

Co-authored-by: Koitharu <nvasya95@gmail.com>
1 year ago
devi f610ae6412 [ManhwaLatino] fix the order of the chapters and some manga don't display all the chapters.
close #1211
1 year ago
Koitharu f80b586081
[Iken] Fixes 2 years ago
Koitharu 7999064dd4
[MangaFire] Fix search (close #1196) 2 years ago
Koitharu 8c387e7983
[Desu] Cleanup 2 years ago
Koitharu 1d29cacf04
[LxManga] Refactor 2 years ago
Draken ce4b4d06d0
Update domain + Fixes (#1206)
* Update MaidScan.kt

* Update NetTruyenLL.kt

* Update Quaanhdaocuteo.kt

* Fixes
2 years ago
Koitharu 79e1d59482
Fixes 2 years ago
devi 47e1c0fa89 Minor changes 2 years ago
devi 3ec23a56ab Minor changes 2 years ago
devi a01493e071 [EpsilonScan] fix close #1038
[Keyoapp] fix pages close #1192
[Yaoiflix] Fix close #1201
[Teamxnovel] fix Searching close #1176
close #1185
[ManhwaLatino] close #1181
[MiHentai] fix close #1180
[GenzToon] close #1169
[MangaGalaxy] close #1168 ( redirect to VortexScans )
[CatharsisFantasy] close #1158
[CatharsisWorld] close #1157
Update sources
2 years ago
Draken 16052210c1
[NhatTruyenVN] Fixes (#1199)
* Update NhatTruyenVN.kt

* Update src/main/kotlin/org/koitharu/kotatsu/parsers/site/wpcomics/vi/NhatTruyenVN.kt

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Update suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

* Update suggestions

Co-authored-by: Koitharu <nvasya95@gmail.com>

---------

Co-authored-by: Koitharu <nvasya95@gmail.com>
2 years ago
Koitharu 7dd9658864
[Ahen] Fix chapters parsing 2 years ago
Koitharu f794f411b7
[DesuMe] Fix covers 2 years ago
Draken 94fbbad6f3
[DocTruyen3Q + TopTruyen] Fixes (#1187)
* Update DocTruyen3Q.kt

* Update CuuTruyenParser.kt

* Update NetTruyen.kt

* Update DocTruyen3Q.kt

* TopTruyen sus :p

* Update TopTruyen.kt

* Remove unavailable tags

* Update TopTruyen.kt

* Remove redundant trim() calls

* Update DocTruyen3Q.kt

* Update TopTruyen.kt

* Remove some unavailable tags

* Update TopTruyen.kt

* Update DocTruyen3Q.kt

* Update TopTruyen.kt

---------

Co-authored-by: Koitharu <nvasya95@gmail.com>
2 years ago
Koitharu 4c5ed57958
Fix json iterator 2 years ago
Draken 3d5cc5ceff Update TruyenVn.kt 2 years ago
Draken c506153737 Update OtakusanEn.kt 2 years ago
Draken 95ee8a4a16 Update OtakusanVi.kt 2 years ago
Draken 3f7b0695f4 Update LxManga.kt 2 years ago
Draken b3bb0939de Update HentaiVnPlus.kt 2 years ago
Koitharu 08fe54c36d
Refactor json utilities 2 years ago
Koitharu 166c5be5d6
Refactoring 2 years ago
Koitharu 7ce2a97c1f
[DocTruyen5s] Refactoring 2 years ago
Draken b23e241973 Update DocTruyen5s.kt 2 years ago
Draken b8b54b7139 Update DocTruyen5s.kt 2 years ago
Draken 5853e54684 Update SayHentai.kt 2 years ago
Draken 32e3b07e5d Update HentaiCube.kt 2 years ago
Draken 3b99676e7a Update HentaiCube.kt 2 years ago
Draken 3c829302c7 Update TopTruyenViet.kt 2 years ago
Draken b579b01db5 Update DocTruyen3Q.kt 2 years ago
Draken 7db798c420 Update CuuTruyenParser.kt 2 years ago
Draken 77ed11b58e Update summary.yaml 2 years ago
Draken 3ae6ffb8d0 Tweak :p 2 years ago
Draken 0e5e4d57c5 Create SayHentai.kt 2 years ago
Koitharu d8cb38a9be
Improve links resolving 2 years ago
Koitharu 07b0a2da9e
Fix loading manga by link 2 years ago
Koitharu 1d040e8291
Link resolver implementation 2 years ago
davvarrr 797a91a037
Merge pull request #1167 from dragonx943/patch-1
Daily update
2 years ago
devi 9c0b57835f [WebtoonEmpire] fix close #1134 2 years ago
devi 72564f5449 [MadaraParser] auto fix url manga on
fix query ( withoutAjax
[MangaPark] fix close #1152
close #1145
[AdultWebtoon] fix close #1139
[ComicExtra] change type close #1130
[liliana] fix and add ManhuaPlusorg, Raw1001, MangaSect, MangaKoma01, ManhuaGold
2 years ago
Draken cc18692538
Update ComickFunParser.kt 2 years ago
Draken 3e102e9d69 Update BlogTruyenParser.kt 2 years ago
Draken b9b53bcc91 Add some tags 2 years ago
Draken 00ef8e5cdf Remove broken tag 2 years ago
Draken a738591dd0 Update DocTruyen5s.kt 2 years ago
Draken 85771bcd89 Fix BlogTruyen 2 years ago
Koitharu 0150ede4cb
[Liliana] Fix chapters order 2 years ago
Draken 1406165789 Update LxManga.kt 2 years ago
Draken a62eadb581 Fix filters 2 years ago
Draken 197b148fca Update Hijala.kt 2 years ago
Draken fd4a1cda2d Update HentaiCube.kt 2 years ago
Draken 108920c4fd Update DocTruyen5s.kt 2 years ago
Draken 49c7702707 Update LilianaParser.kt 2 years ago
Draken 15361654b7 Create LilianaParser.kt 2 years ago
Draken c2b64d40d1 Create DocTruyen5s.kt 2 years ago
Koitharu ce9404da84
[Madara] Fix multiple pages handling 2 years ago
Koitharu 5c55d65eb3
Refactor and fixes 2 years ago
Draken 6f9180545b Update and rename HentaiVnParser.kt to HentaiVnPlus.kt 2 years ago
Draken e966aa07cd Create BlogTruyenParser.kt 2 years ago
Draken 102993cb10 Remove BlogTruyen - Closed site 2 years ago
Draken d0627b83d0 Update BlogTruyenVN.kt 2 years ago
Draken a6602a0f5a Update BlogTruyenVN.kt 2 years ago
Draken 55b9a72795 Create BlogTruyenVN.kt 2 years ago
Draken d4f0c9d116 Update LxManga.kt 2 years ago
Draken bbb3184454 Update CuuTruyenParser.kt 2 years ago
Draken 7d81e68757 Update CuuTruyenParser.kt 2 years ago
Draken 487388c950 Update DocTruyen3Q.kt 2 years ago
Draken 9d507eb10c Update TopTruyenViet.kt 2 years ago
Draken cf9b19ffa7 This source will no longer work!
This site has been sold and it has changed name + source structure :p

Changed to: https://github.com/KotatsuApp/kotatsu-parsers/issues/1135
2 years ago
Draken 9591269a68 Update TopTruyenViet.kt 2 years ago
Draken 27fe426213 Update DocTruyen3Q.kt 2 years ago
Draken b23d899f5e Update CuuTruyenParser.kt 2 years ago
Draken 8bf0c03fe2 Tags ? 2 years ago
Draken 1ebb298cd7 Update CuuTruyenParser.kt 2 years ago
Draken 9f588ab9fd Rename HentaiVn.kt to HentaiVnParser.kt 2 years ago
Draken 91a1198900 Update HentaiVn.kt 2 years ago
Draken 321ac087dc Update TruyenVn.kt 2 years ago
Draken 5d81c030b5 Rename HentaiVnFit.kt to HentaiVn.kt 2 years ago
Draken e798bc7787 Update HentaiVnFit.kt 2 years ago
Draken 057b053d58 Update TopTruyenViet.kt 2 years ago
Draken e9a9ef6b95 Update DocTruyen3Q.kt 2 years ago
Draken bd7062a479 Update LxManga.kt 2 years ago
Koitharu a659068ef3
Fixes batch 2 years ago
Koitharu 6f7e1fcfb2
[Grouple] Fix closing response on error 2 years ago
Draken eff26c8889 Create NetTruyenFE.kt 2 years ago
Draken 037059d0e6 Update NetTruyenHE.kt 2 years ago
Draken 9b78a3ecf2 Update PinkTeaComic.kt 2 years ago
Draken 4df8311e18 Update Truyenqq.kt 2 years ago
Koitharu 645006fde8
Fix closing response in interceptors 2 years ago
Koitharu 95bbfefe90
Merge pull request #1128 from dragonx943/patch-1
Daily update sources
2 years ago
Draken 3181a5860b
Update Truyenqq.kt 2 years ago
Draken 39c83444b7
Broken:( 2 years ago
Draken 7356d2036d
Update BlogTruyenVNParser.kt 2 years ago
Koitharu a8df8665ae [CuuTruyen] Fix tags 2 years ago
Koitharu 6355252699
Merge pull request #1117 from dragonx943/het-cuu-truyen
[CuuTruyen] Fix tags
2 years ago
Draken 6ec65e4aa4
Update TopTruyenViet.kt 2 years ago
Draken f58bb421e5
Update DocTruyen3Q.kt 2 years ago
Draken 38ecde185a
Update CuuTruyenParser.kt 2 years ago
Draken e3924d5b06
Remove something
Remove some Tags (not found manga with it)
2 years ago
Draken c1a7fc2624
Update CuuTruyenParser.kt 2 years ago
Koitharu 818d14fdbb
Merge pull request #1109 from dragonx943/patch-2
Update domain
2 years ago
Draken e93a6865e7
Update HentaiVNParser.kt 2 years ago
Draken c6325eef50
Update DocTruyen3Q.kt 2 years ago
Draken 06d6653c78
Create DocTruyen3Q.kt 2 years ago
Draken aac722909b
Update HentaiVNParser.kt 2 years ago
Draken 78d64713a2
Create NetTruyenUU.kt 2 years ago
devi 7d71421c30 Update sources
Close #1105
Close #1104
2 years ago
Koitharu 7a571e596d
[NHentai] Fix duplicated preferences 2 years ago
davvarrr 1718a034ee
Merge pull request #1101 from dragonx943/patch-1
Update domain (Fetch data error)
2 years ago
devi 5443daabb6 [PornComix] fix close #1100 2 years ago
devi de4f8ef2f9 [madtheme] add SearchWithFilters
[manga18] add SearchWithFilters
[mangadventure] add SearchWithFilters
[mangaworld] add POPULARITY_ASC, NEWEST_ASC, SearchWithFilters, Year, ContentTypes, Multiple states
Remove type on fetch tags
[mmrcms] add SearchWithFilters
[TrWebtoon] add SearchWithFilters
[Truyenqq] add TagsExclusion, ContentTypes, NEWEST_ASC, UPDATED_ASC, POPULARITY_ASC
[Baozimh] add ContentTypes
[zmanga] add SearchWithFilters, Year, ContentTypes
2 years ago
Draken b9fb4a5e93
Update HentaiVNParser.kt 2 years ago
Koitharu 3cdd391410
[ExHentai] Use ContentType filter 2 years ago
Koitharu ad77e8afa4
[CuuTruyen] Fix UserAgent 2 years ago
Koitharu c85d17d60d
[CuuTruyen] Fix 2 years ago
Koitharu 955c75a99f
[CuuTruyen] Fix attempt 1 2 years ago
devi d32d1f5044 [ImHentai] add ContentTypes
add new ContentTypes
[iken] add SearchWithFilters, ContentTypes
[keyoapp] add SearchWithFilters
[likemanga] add POPULARITY_TODAY, POPULARITY_WEEK, POPULARITY_MONTH, SearchWithFilters
2 years ago
Koitharu 091c5247d5
Merge pull request #1093 from dragonx943/het-cuu-truyen
Fix decryptor + Add image reconstructor ?
2 years ago
Draken c593798d83
Update NetTruyenSSR.kt 2 years ago
Draken edac81f51a
Update NetTruyenLL.kt 2 years ago
Draken 8da3b4b7ac
Update NetTruyen.kt 2 years ago
Draken 844e5fdb50
Update Truyenqq.kt 2 years ago
Draken 3f502de1f7
Update Truyenqq.kt 2 years ago
Draken 6532f392b5
Update Truyenqq.kt 2 years ago
Draken 9f340b2dfb
Update LxManga.kt 2 years ago
Draken 30d34de653
Update CuuTruyenParser.kt 2 years ago
devi 52329e658f [SoManga] close #1091
[AsuraComic] fix close #1088
[HeanCms] add SearchWithFilters
[fuzzydoodle] add ContentTypes, SearchWithFilters
[DoujinDesu.tv] add SearchWithFilters, ContentTypes
2 years ago
devi 1c0df02c56 [HiveToon] fix close #1086 2 years ago
devi 2061a971b8 [fmreader] add SearchWithFilters
[LegacyScans] add ContentTypes, SortOrder.UPDATED
[MangaMana] add SortOrder.RATING_ASC , SortOrder.NEWEST_ASC
[Manhwa18.com] move to /en
Add new option on .toAbsoluteUrl()
2 years ago
Koitharu cc62981f12
[ComicK] Fix chapter numbers in names 2 years ago
Koitharu 613623fa53
Update information pages 2 years ago
devi fffdbfdbee [MangaDex] fix close #1084 2 years ago
devi a55a4720fe [TuMangaOnline] add SearchWithFilters, TagsExclusion, Demographics, ContentTypes
[TempleScanEsp] fix
Add KODOMO on Demographic
Add ONE_SHOT on ContentType
2 years ago
Koitharu 42f2813e44
Update readme 2 years ago
Koitharu c58d35288b
Added some codegen features 2 years ago
Koitharu f410df40f1
Improve utilities 2 years ago
Koitharu 7e95949ab7
[Grouple] Improve filter 2 years ago
Koitharu 7c4c3a3c97
[ExHentai] Improve filtering and tags 2 years ago
devi 7f25c5f82d [Manga-Starz] Fix close #1073 2 years ago
devi f1939391e8 [AsuraComic] Fix close #1077
add ContentTypes , SearchWithFilters
[CloneManga] isSearchSupported false
[MangaGeko] fix tags,  add MultipleTags, TagsExclusion
[Manhwa18.net] add isSearchSupported
[ManhwasMen] add Exception
[Pururin] add SearchWithFilters, MultipleTags , TagsExclusion
[VyManga] add SearchWithFilters, MultipleTags, TagsExclusion
2 years ago
devi d1f9b0d829 [mangaDex] Fix SortOrder and remove YearRange
[Bato] add OriginalLocale
[MangaPark] add OriginalLocale, SearchWithFilters
[Comick] add SearchWithFilters
[NineMangaParser] add SearchWithFilters
[AnimeBootstrap] add SearchWithFilters , ContentTypes
[madara] add Year, SearchWithFilters, SortOrder.RELEVANCE
[TeamXNovel] add ContentTypes , SearchWithFilters
2 years ago
Koitharu f2354957e6
[MangaDex] Fix filter capabilities 2 years ago
devi 600eab20a1 remove unnecessary code
Remove old chapter manga and change last source for new manga chapter
Simplify parseChapterDate
Fix fetchAvailableTags on some source need to override
2 years ago
Koitharu 336c4a4d49
Add year constants 2 years ago
Koitharu a1e47edfb2
Fix getPageUrl visibility 2 years ago
Koitharu 5269659c73
Merge pull request #1063 from KotatsuApp/feature/advanced_filter
Advanced filter
2 years ago
Koitharu ab1b549a64
Merge branch 'master' of github.com:KotatsuApp/kotatsu-parsers into feature/advanced_filter 2 years ago
devi 2867dffc5a Capabilities simplified on madara to avoid redundant code 2 years ago
Koitharu 6d972a13fe
Fix filter capabilities/options default values 2 years ago
Koitharu 3918bfafdf
Specify visibility modifiers explicitly 2 years ago
Koitharu 1c15e569bf
Migration to MangaListFilterCapabilities 2 years ago
Koitharu 5030548500
Global refactoring: partial migrate to ListFilterOptions/ListFilterCapabilities 2 years ago
Koitharu c5d3a7b0c1
Global refactoring: migrate getList to new filter 2 years ago
Draken 75c46130ed Update NetTruyenSSR.kt 2 years ago
Draken 62c17155ca Update NetTruyen.kt 2 years ago
Draken 3a52062ce4 Update NetTruyenLL.kt 2 years ago
Draken 74ccbdcaee Update HentaiCube.kt 2 years ago
Draken fbd610349a Update PinkTeaComic.kt 2 years ago
Draken 8af75d8c6a Update TruyenVn.kt 2 years ago
Draken 0bafd117ff Update and rename HentaiVnCafe.kt to HentaiVnFit.kt 2 years ago
Koitharu ae9a7c6090
Introduce MangaListFilterCapabilities class 2 years ago
Koitharu 481ad02e01
Partial migration to MangaListFilterV2 2 years ago
Koitharu 9042074c50
Merge remote-tracking branch 'origin/SortOrderPopularitytime' into feature/advanced_filter 2 years ago
Koitharu 821e51ff7d
Merge branch 'filter-type-and-demographic' into feature/advanced_filter 2 years ago
Koitharu 0f4808f5b5
Use ContentType instead of Type 2 years ago
Koitharu aba8a80d8f
[MangaDex] Fix settings 2 years ago
Koitharu 2b0edfde60
[MangaDex] Data-saver server support (close #1062) 2 years ago
Koitharu 01a496768a
MangaListFilter v2 2 years ago
Draken 82a57cb3c8 Update HentaiVN Domain 2 years ago
Naga 27c07d86bc fix tags 2 years ago
Naga e916f2a66e Closes #1057 2 years ago
devi 494ecdfec8 Add year range. 2 years ago
devi 8a3c0e02f6 Add relevance sort order for good filters with text searches
Adding the added filter
2 years ago
devi 37428bf9c0 Add year, localeMangas filter on MangaListFilter.Advanced
Add year, localeMangas and localeMangas on MangaDexParser
2 years ago
devi 62b4a21cef Add query filter on MangaListFilter.Advanced
Add query.filer Advanced on MangaDexParser
2 years ago
davvarrr b566e4e7e4
Update src/main/kotlin/org/koitharu/kotatsu/parsers/MangaParser.kt
Co-authored-by: Koitharu <nvasya95@gmail.com>
2 years ago
davvarrr 4ff2061bb7
Update src/main/kotlin/org/koitharu/kotatsu/parsers/MangaParser.kt
Co-authored-by: Koitharu <nvasya95@gmail.com>
2 years ago
devi 03d7e138e9 Add support type and Demographic on MangaListFilter 2 years ago
devi 0540da57e6 Update Sources
Close #1060
2 years ago
devi e0e9d25a4f Improve madara 2 years ago
devi fd7684866e Refonte madara :
Add tagsExclude
Add support multiple tags
Add RATING_ASC ( and prepare RELEVANCE )
simplify postrequest
Preparation for query, year, author and artist filters
Change withoutAjax for some sources
2 years ago
devi 7c8b427d2a Miss add SortOrder in availableSortOrders for NHentaiParser 2 years ago
devi 4fcc68d149 add POPULARITY time sortOrder 2 years ago
Koitharu ad726a3fd7
[CuuTruyen] Fixes 2 years ago
Koitharu f167a8c8f6
[CuuTruyen] Fixes 2 years ago
Draken b404b44008 insert something :p 2 years ago
Draken c6dc51bb5d Some suggestions 2 years ago
Draken 69ee6be246 Create NetTruyenHE.kt 2 years ago
Draken e08fab2938 Final Source commit:(
i cried:(
2 years ago
Koitharu 5db275bce2
[Grouple] Handle Usagi redirects 2 years ago
Koitharu 9c05e3f8e8
Usagi parser 2 years ago
devi ac2c9b8621 Add InfamousScans 2 years ago
devi 02852ac4e7 Add Exclude Genres and new sort order on ComickFunParser
close #1032
2 years ago
devi 1163541ac5 Update sources and add sources
Fix MagusManga close #1021
2 years ago
devi 2da0c17fe2 Merge remote-tracking branch 'origin/master' 2 years ago
devi 7528480f54 add asc 2 years ago
devi a45a270479 Add Asc on some SortOrder 2 years ago
devi 380804361b Add Asc on some SortOrder 2 years ago
devi e072ee8b44 add asc 2 years ago
devi d812644e61 Add Asc on some SortOrder 2 years ago
devi 71deb11e1f Add Asc on some SortOrder 2 years ago
devi 2e138da3d5 add source and fix 2 years ago
Koitharu 939b6b1e46
Improve TooManyRequestExceptions 2 years ago
Koitharu d937c7e6ab
[ExHentai] Fix IP ban detection 2 years ago
Koitharu f91ff0b9d0
[ExHentai] Detect IP ban 2 years ago
Koitharu 98cbee11b9
Unify user agent header processing 2 years ago
Draken 7f0431d493 Update NetTruyenLL.kt 2 years ago
Draken 3227c53699 Create NetTruyenLL.kt 2 years ago
Draken 2ee8784d7d Update NetTruyen.kt 2 years ago
Draken 0fac0f450c Create BlogTruyenVNParser.kt 2 years ago
Draken b4fb202db4 Update BlogTruyenParser.kt 2 years ago
Draken 93ba163ea2 Update NetTruyen.kt 2 years ago
Draken bd3ff20146 Hotfix Source 2 years ago
devi b3a0b97f0e add HastaTeamReader close #1004
Update Sources ( fix , url and broken )
Add sources
2 years ago
devi ca212ca692 add auth on some sources
add Config Header on many sources
close #974
2 years ago
devi 3b809202b3
Merge pull request #994 from dragonx943/patch-1
Update HentaiVN Domain
2 years ago
Draken 9858419586
Update HentaiVN Domain 2 years ago
devi 1f7fe2aed3 Fix Sources
Add sources
Correct detect login on MadaraParser
close #988
fix oocini.biz close #987
Changing the latest DEPRECATION
( Technically we can raise the old support for getList() and MangaChapter() )
2 years ago
devi 71affd155c Some Fix 2 years ago
devi a54f030c4e Fix, change url and add broken
Fix HeamCms Chapter close #970
add Template FuzzyDoodleParser
Fix LelScanVf
Add hentaislayer , ScyllaComics
Close #609
Close #901
Close #440
Add template IkenParser
Fix MangaGalaxyParser
Add VortexScans
Fix HniScantrad
Add HastaTeam close #939
Add HotComicsParser close #962
Add HentaiCrot close #913
2 years ago
devi 3b5a018f8c Fix, change url and add broken 2 years ago
devi 8d5fc945d4 Move ReaperComics to HeanCms
Rework heancms based on @NagaYZ's code
2 years ago
devi 50194df24d Rm duniakomik close #952
Fix DoujinKu close #951
Fix DoujinDesu.tv close
Add, fix some sources  #963
change url close #965 ( the copy was added anyway )
2 years ago
devi 5f771973a8
Merge pull request #967 from NagaYZ/fix-reaper
Fix reaperscan site structure changed
2 years ago
devi 4fcfbb374f
Merge pull request #959 from dragonx943/patch-1
Broken source + Update domain
2 years ago
Koitharu 853c21e49f
[ExHentai] Fix tags parsing 2 years ago
Naga 3efb5d8520 fixed page duplicated 2 years ago
Naga e74985d870 Merge branch 'refs/heads/master' into fix-reaper
# Conflicts:
#	src/main/kotlin/org/koitharu/kotatsu/parsers/site/en/ReaperComics.kt
2 years ago
Naga 32eab42c26
Merge pull request #31 from KotatsuApp/master
[pull] master from KotatsuApp:master
2 years ago
Naga 0d3d8232f9 reaper changed site structure 2 years ago
Draken fd3dc389df
Update Saytruyenhay.kt 2 years ago
Draken 2d274f37ee
Update TruyenTranhDamMyy.kt 2 years ago
Draken d1a12df18f
Create Saytruyenhay.kt 2 years ago
Draken 017a9a58a3
Update TruyenTranhDamMyy.kt 2 years ago
Draken e33c57f51b
Update Quaanhdaocuteo.kt 2 years ago
Draken da041df054
Delete src/main/kotlin/org/koitharu/kotatsu/parsers/site/madara/vi/Saytruyenhay.kt 2 years ago
Draken 99b57e1297
Update TruyenTranhDamMyy.kt 2 years ago
Draken 6ce8a22514
Update BlogTruyenParser.kt 2 years ago
Draken a9fc534ea7
Remove dead sources + Change domain (#955)
* Delete src/main/kotlin/org/koitharu/kotatsu/parsers/site/vi/TruyentranhLHParser.kt

* Update NetTruyen.kt

* Delete src/main/kotlin/org/koitharu/kotatsu/parsers/site/wpcomics/vi/NetTruyenAA.kt

* Delete src/main/kotlin/org/koitharu/kotatsu/parsers/site/wpcomics/vi/NetTruyenX.kt

* Delete src/main/kotlin/org/koitharu/kotatsu/parsers/site/wpcomics/vi/Nettruyenmax.kt

* Update NetTruyen.kt

* Update NhatTruyenVN

* Delete src/main/kotlin/org/koitharu/kotatsu/parsers/site/wpcomics/vi/NhatTruyenSS.kt
2 years ago
devi 6e04fa1251 Add CupFoxParser and sources
Add OneMangaParser and sources
2 years ago
devi 8f851282b4 Add XManhwa, MangaMana, MangaFr
Fix XoxoComics close #928
2 years ago
devi d774935a6a Merge remote-tracking branch 'origin/master' 2 years ago
devi 712d42b328 Some Fix 2 years ago
SBS123 f66aab03f2
Update MangaNoon (#924)
* Update Manjanoon.kt

* Update Manjanoon.kt

* Update Manjanoon.kt
2 years ago
devi 53ca9c9677 Rework WpComicsParser
Adds sources
2 years ago
devi 7bcc624a74 Add some sources
Fix some sources
close #929
2 years ago
devi fa2d73348e rm @Broken 2 years ago
devi 26c3edbc17 Fix some sources
add @Broken on sources
Close #927
2 years ago
Koitharu b9d89edfc2
[DynastyScans] Fixes 2 years ago
Koitharu 7ccc6438d5
[Grouple] Fix public url 2 years ago
devi cd9f65596b Add some sources
Fix some sources
Add @Broken on some sources
close #909 #912 #920 #910 #914 #916
2 years ago
Koitharu 5404743f17
Update dependencies 2 years ago
Koitharu c0296d0882
Fix build 2 years ago
Inkesk dec07bd27d Update MangaGalaxy.kt
parser issue for mangagalaxy not loading chapter solved
2 years ago
Inkesk 1be1e9843d Update MangaGalaxy.kt
Updated the domain
2 years ago
Inkesk 5b5afcfe4c Update LuaScans.kt
Luascans.com changed domain to Luacomic.com , committing the change so future bugs and issues don't arise.
2 years ago
Inkesk 4f5dc9ffad Update ManhuaScan.kt
manhuascan.io was changed to kaliscan.io hence the change committed in here .
2 years ago
Inkesk 9c5b00ecf0 Update DarkScans.kt
Updated the domain name from darkscans.com to darkscans.net
2 years ago
Inkesk 048ba99323 Update ResetScans.kt
Changed the domain from reset-scans.xyz to reset-scans.co.
As previous domain for source was closed due to DMCA. Figure out the rest of details yourself , but resetscans is still on mates. Enjoy
2 years ago
devi 35bdc15aa9 Fix url grimelek 2 years ago
Koitharu b06288e7eb
Merge branch 'master' into experimental/abstract_sources 2 years ago
SBS123 426c7ad708 Update Normoyun.kt 2 years ago
devi fadf8861e7 Rm BrMangas close #454
rm PowerManga close #462
Add MangaNinja close #897
Add Grimelek close #375
Add OpiaToon close#374
Add login on madara
Fix ManhwaFreak
2 years ago
devi dceedf019a Fix HentaiVN
Remove some warning
2 years ago
devi f49ddf7437 Fix HentaiVN
Remove some warning
2 years ago
devi 33a3c8350d
Merge pull request #885 from dragonx943/patch-1
domain change hentaivn
2 years ago
devi 052fa146d2
Merge pull request #889 from NagaYZ/fix-manhuaplus
domain change manhuaplus
2 years ago
devi 1c85782690 Fix CafecomYaoi close #715
Fix Topmanhua close #710
Add Some sources
Add new Parser
fix EpsilonScan close #602
Add MangaHub.link close #892
Fix SussyScan close #893
Fix MaidScan close #585
fix HuntersScan close #584
Fix TeamXNovel close #575
close #574
Fix MangaTown close #569
Add LeitorDeManga close #864
add Search on ScanParser
Add mangabr close #862
Add Manga Italia close #841
Add Mangá Terra close #856
Fix MilaSub close #559
Fix KomikTapParser close #554
Fix YugenApp close #586
Fix TuMangaOnline close #566
Fix bato close #516
Fix Mangakakalot close #490
2 years ago
Draken cf5abdf7d3
Update HentaiVNParser.kt 2 years ago
Naga df562bedbb Closes #886 2 years ago
devi 11c1eafa0d Fix ReaperComics close #816
close #699
Fix GuildaTierDraw close #786
Fix Truyenqq close #793
Fix JiangzaiToon close #783
Fix MangaPark close #734
Fix ScanIta close #670
Add MangaPeak close #888
Fix ArvenComics close #660
2 years ago
Draken cc8369e02a
Update HentaiVNParser.kt 2 years ago
devi cd468df9ad Add : MangaTx.to - AlterkaiScans - StoneScape
Kalango - SolooScan - ErosScans - SolooScan
RaysScan - TemakiMangas - ZinChanManga
Urls Changes
Add volume on some parser
Fix ThunderScans close #818
NinjaScan close #753
Add Ngomik close #761
Remove duplicate source paragonscans
Fix LuminousScans close #578
Fix YugenApp close #854
Fix KaiScans close #614
Add PeachBl Close #884
Fix GalinhaSamurai close #835
Fix RocksManga close #827
Fix CrystalComics close #792
2 years ago
Draken 64666e42e8
Update HentaiVNParser.kt
not sure, i hope it will works...
2 years ago
Koitharu ad7c953d29
Merge pull request #883 from NagaYZ/fix-mangaworld 2 years ago
Naga b345efe2d1 fixed source mangaworld pages 2 years ago
Naga 74b8aaa94e fixed source mangaworldadult 2 years ago
Naga 1ace1ba3ec added source mangaworldadult, refactor mangaworld 2 years ago
Naga b822574b70 moved mangaworld in package 2 years ago
devi 466ca0f0e9 Merge remote-tracking branch 'origin/master' 2 years ago
devi 0bcac0c639 Add @Broken on some Dead Sources
Urls Changes
repair sources
Remove Duplicate Source
Add volume on some parser
2 years ago
Koitharu 7433fb8fa0
[LibSoc] Fix image server option 2 years ago
Koitharu b1ac1cf238
[LibSoc] Fix image server option 2 years ago
Naga a88a861d82 added main page sortby updated 2 years ago
Naga 41ced8edee added states , fixed title, added types 2 years ago
Koitharu 0f73f74539
[LibSoc] Default image server config option 2 years ago
Koitharu bb9902e3b2
[LibSoc] Add split by translations config option 2 years ago
devi c03b0fc981 Fix search on MangaDexParser
Fix State on ScansMangasMe
2 years ago
Koitharu e8733f15e4
Merge pull request #853 from NagaYZ/fix-mangaworld 2 years ago
Naga 26519c71a6 fix #852 2 years ago
Koitharu f923acc5a7
Extract MangaSource interface 2 years ago
Naga 55b9b6aac4
Merge pull request #26 from KotatsuApp/master
[pull] master from KotatsuApp:master
2 years ago
devi 52db07a33b MiHentai Closes #829
MrBenne Closes #627
Closes #555
Norterose Closes #625
Closes #564
Closes #483
MaidSecret Closes #623
Closes #550
HentaiOrigines
Fix SortOrder On MmrcmsParser
2 years ago
Koitharu 7ed8c9f787
[AllHen] Update domain 2 years ago
Koitharu cc12b193d3
[MangaWorld] Refactor 2 years ago
Koitharu 4d9c51bcd9
[MangaWtf] Parser 2 years ago
Naga 7de48393b6 added source MangaWorld 2 years ago
Naga 283aa92fd5 fixed url and chapters 2 years ago
Naga 9d0393ca72 update domain 2 years ago
Koitharu c5fc7f76d3
Merge pull request #845 from NagaYZ/fix-aquamanga 2 years ago
Naga 6b6325895b update aquamanga domain 2 years ago
Koitharu 39e9f5a2ff
Added UNKNOWN manga source constant 2 years ago
Koitharu 204223e2a8
Merge pull request #837 from NagaYZ/fix-baozimh 2 years ago
Naga b39abcc4a0 fix select chapter 2 years ago
SBS123 f49e9fa66b Update MangaPro.kt 2 years ago
scaledzdn 56fd22b43f src: id: shinigami: Change url to shinigamitoon.id
* Previously not working.
2 years ago
Koitharu 0b2bf607f7
Make locale non-nullable 2 years ago
Zakhar Timoshenko 3ff9e69585
[MangaOVH] Removal (closes #824) 2 years ago
Koitharu 9ceb90204c
Merge pull request #801 from OtakuArab/patch-3 2 years ago
SBS123 ab3c4b2b2d
Update and rename PewPiece.kt to Gmanga.kt 2 years ago
SBS123 060ca4b7ef
Update MangaLinkNet.kt 2 years ago
SBS123 a7a29ef029
Rename LinkManga.kt to MangaLinkNet.kt 2 years ago
SBS123 e7826c2570
Update src/main/kotlin/org/koitharu/kotatsu/parsers/site/madara/ar/LinkManga.kt
Co-authored-by: Koitharu <nvasya95@gmail.com>
2 years ago
SBS123 77a733a062 Update and rename EnAresManga.kt to FlAres.kt 2 years ago
SBS123 96bd99650f Update and rename RocksManga.kt to YuriMoonSub.kt 2 years ago
SBS123 2d1899f1f2 Update TeamXNovel.kt 2 years ago
SBS123 42b1740b0f Update FlixScans.kt 2 years ago
SBS123 fbf613c4e6 Update and rename MangalinkParser.kt to LekMangaOrg.kt 2 years ago
SBS123 bf18070bc1 Update and rename MangaLekNet.kt to MangaLeko.kt 2 years ago
SBS123 c9c41fdd33 Create StellarSaber.kt 2 years ago
SBS123 949d822385 Delete src/main/kotlin/org/koitharu/kotatsu/parsers/site/madara/ar/StellarSaber.kt 2 years ago
SBS123 83b8f28055 Create MangaPro.kt 2 years ago
SBS123 c602d817d9 Create CrowScans.kt 2 years ago
SBS123 a50b217b6f Update and rename MangaLekCom.kt to LekMangaCom.kt 2 years ago
SBS123 23c031780f Update and rename Mangaatrend.kt to ManhaTok.kt 2 years ago
SBS123 e3b50d5dfa Update and rename KingOfShojo.kt to Hijalacom.kt 2 years ago
SBS123 9b1bee567f Update and rename KingOfManga.kt to MangaAtrend.kt 2 years ago
SBS123 3534973357 Create RocksManga.kt 2 years ago
SBS123 c4c4bdd547 Update Source 2 years ago
SBS123 528fadceb8 Update UmiManga.kt 2 years ago
SBS123 e15aecaa52 Update and rename BeastScans.kt to UmiManga.kt 2 years ago
SBS123 62e635ebe5 Update MangaTime.kt 2 years ago
SBS123 be846b43ae Update Mangaspark.kt 2 years ago
SBS123 7bfe168eee Update MangaLionz.kt 2 years ago
SBS123 e8d4a91ad7 Update MangaStarz.kt 2 years ago
SBS123 bc1804c030 Update and rename LikeManga.kt to LikeMangaNet.kt 2 years ago
SBS123 d18c9fe0da Update LikeManga.kt 2 years ago
SBS123 a7ea87dd8c Update AzoraMoon.kt 2 years ago
SBS123 e3a0ab8513 Update LikeManga.kt 2 years ago
SBS123 3a77144a5e Update LikeManga.kt 2 years ago
SBS123 69e311829b Update and rename MangaLike.kt to LikeManga.kt 2 years ago
SBS123 b1effdb7c7 Update and rename Azoranov.kt to AzoraMoon.kt 2 years ago
SBS123 124537eb18 Create ArAreaScans.kt 2 years ago
SBS123 b81924b144 Update Manjanoon.kt 2 years ago
Naga dd18605678 cleanup 2 years ago
Naga 6bce31d43f fix title selector 2 years ago
Naga 0da5b47ce5 fetching code using suspendlazy 2 years ago
Naga d0d891b112 Apply suggestions from code review
Co-authored-by: Koitharu <nvasya95@gmail.com>
2 years ago
Naga 2292c392e9 fix filters, added search support (#775) 2 years ago
Koitharu cc35aa6bc3 Broken parser annotation 2 years ago
SBS123 9e4a124a58
Update ScarManga.kt 2 years ago
SBS123 d63afb0917
Rename LikManga.kt to LinkManga.kt 2 years ago
SBS123 80a3f61a1f
Create ArAreaScans.kt 2 years ago
SBS123 303c817d2d
Update and rename MangalinkNet.kt to LikManga.kt 2 years ago
SBS123 d9cf94c6b5
Update LekManga.kt 2 years ago
SBS123 d72414d531
Update and rename AresManga.kt to ScarManga.kt 2 years ago
Koitharu 26be293f24
[ComicK] Fix volume & chapter numbers 2 years ago
SBS123 36b8633c1e Update and rename MangaLek.kt to LekManga.kt 2 years ago
Naga 6bdbf2c65e closes #795 2 years ago
Zakhar Timoshenko 80ec615548
[Comick] Fix "No value for hentai" 2 years ago
Koitharu ffc45ff79b
Merge pull request #777 from AwkwardPeak7/patch-1 2 years ago
AwkwardPeak7 48cb28d9f8
Update MangaFireParser.kt 2 years ago
Koitharu 51da0b62c1 Apply suggestions from code review 2 years ago
AwkwardPeak7 e391ed52f3 MangaFire: related manga author language filter 2 years ago
AwkwardPeak7 a5e6e4255d MangaFire: tag exclusion 2 years ago
AwkwardPeak7 590e7e3ba3 add MangaFire 2 years ago
Koitharu d218ad5a67
[MangaReaderTo] Syncronize descrambling 2 years ago
Koitharu 350bc0ad58
Small refactor 2 years ago
AwkwardPeak7 a0f9bc032d cleanup and test impl 2 years ago
AwkwardPeak7 4edcd70871 simplify api 2 years ago
AwkwardPeak7 90c0bf46f4 image redrawing api 2 years ago
AwkwardPeak7 9d4fc1980f MangaReader.To 2 years ago
Naga 332524f1f3 new source added modescanlator, closes #759 2 years ago
Koitharu 078b59b1e2
[Madara] Migrate to ScatterSet and add UserAgent config option 2 years ago
Koitharu 0551ed5f0b
[MangaDex] Chapters fixes 2 years ago
Koitharu 915d4093b9
Fixes 2 years ago
Koitharu 952e9c39ac
[LibSocial] Rewrite parsers 2 years ago
Koitharu 3e32a6280a
[LuratoonScan] Rewrite parser 2 years ago
Zakhar Timoshenko 288b67e250
Close #741 2 years ago
Zakhar Timoshenko 7d2f5696f5
Close #719 2 years ago
Zakhar Timoshenko 8c3fec0933
Close #677 2 years ago
Koitharu 33b00fe65f
Fixes #728 #725 2 years ago
Koitharu f0f50a37b5
Fixes batch #733 #730 #731 #727 2 years ago
Koitharu c2b2148190
Upgrade gradle 2 years ago
Koitharu f22362dc53
[Grouple] Fix alt titles parsing 2 years ago
Koitharu 68cc1d4c4f
[DesuMe] Improvements 2 years ago
Zakhar Timoshenko 0f84ef1e58
[Desu] Fix list loading 2 years ago
Naga a245574dee fix #702 2 years ago
Koitharu fd90970173
[MadTheme] Improve pages parsing 2 years ago
Koitharu 75a55e4748
[SinensisScans] Update domain #674 2 years ago
Koitharu bbd4867830
[FbSquads] Update domain #681 2 years ago
Koitharu 3463c8a49e
[HeanCms] Fixes 2 years ago
Koitharu 7829a2ad3b
[MadTheme] Fix pages parsing #701 2 years ago
AwkwardPeak7 fb387dbcd9 FreakComic: update selectors 2 years ago
Koitharu cd0b3cd1dc
Update gitignore 2 years ago
Koitharu fe500a27c0
Update dependencies 2 years ago
Koitharu da860bc250
Fix imports 2 years ago
Koitharu e3a8eeb647 Update src/main/kotlin/org/koitharu/kotatsu/parsers/site/heancms/en/OmegaScans.kt
Co-authored-by: AwkwardPeak7 <48650614+AwkwardPeak7@users.noreply.github.com>
2 years ago
Naga 6dbc709f98 fix #687 2 years ago
AwkwardPeak7 49ca54d68a MangaGalaxy: move to MangaReader 2 years ago
AwkwardPeak7 e81b8127af DrakeScans: move to MangaReader 2 years ago
AwkwardPeak7 d1b490ed98 add german to MangaPlus 2 years ago
AwkwardPeak7 893ed6afd2 999Hentai: fix api url 2 years ago
Zakhar Timoshenko 9df2cf5bd7
[AstrumScans] Source removal on request (closes #682) 2 years ago
Zakhar Timoshenko b5ceaf4e65
Add OtakuWorld to README 2 years ago
Naga 44ea9fe709 fix #667 2 years ago
Koitharu 8174229d28
Merge pull request #658 from NagaYZ/fix-xoxocomics
fix chapter order
2 years ago
Zakhar Timoshenko f628ed2e1b
Add kotatsu-dl to README 2 years ago
Zakhar Timoshenko c90a0a77bc
Update README 2 years ago
Naga 10b3f2e65b fix chapter order 2 years ago
Koitharu 9821e93d25
Merge pull request #655 from NagaYZ/fix-xoxocomics
fix #652
2 years ago
Naga 6c16c9c55f fix #652 2 years ago
Koitharu ab679bccc6
Merge pull request #647 from NagaYZ/fix-issue
Fix Omegscans
2 years ago
Naga 6fcf3785ef fix #591 2 years ago
Naga e605b325c7 fixrating 2 years ago
Naga a714d0927e fix OmegaScans tags 2 years ago
Naga fe1ef89d30 fix OmegaScans site structure changed 2 years ago
Koitharu 639895f511
MangaOVH: Parsing related manga 2 years ago
Koitharu 4fdc02de35
MangaOVH: Fix handling unexisting branches 2 years ago
Koitharu 103ef11f3d
Dynamic UserAgent support 2 years ago
Koitharu 69e7efe6d1
NHentai: configurable User-Agent 2 years ago
Koitharu 83e971d85b
Update UserAgents 2 years ago
Koitharu fec60955ed
Merge pull request #631 from NagaYZ/fix-issue
Fix source domain and issue
2 years ago
Koitharu c5b980a406
Update issue templates 2 years ago
Koitharu 99483268d6
Remove Bakai parser #629 2 years ago
Naga 67fef2cc1c fix mangatown select and pagination 2 years ago
Naga f450111c1c fix Baozimh webview and 50+ page chapter 2 years ago
Naga 718e7eab82 fix source domain 2 years ago
Koitharu eb031b00ab
ComicK: Update domain 2 years ago
Koitharu a10c1101ab
New source: MangaOVH 2 years ago
Naga 9c0c20f86b Merge remote-tracking branch 'upstream/master' 2 years ago
Koitharu c6d1f1b525
[NepNep] Refactor #628 2 years ago
Koitharu 9a84791f5c
Merge pull request #628 from NagaYZ/feature-manga4life-sort
Feature manga4life sort
2 years ago
Koitharu 14fc02cb23
[ReaperComics] Refactor #620 2 years ago
Koitharu 39ae6a406c
[Grouple] Added Seimanga support 2 years ago
Koitharu ba8682f79e
Merge pull request #620 from NagaYZ/feature-search-reaper
Feature search reaper
2 years ago
Naga 986e4d8fcc
Merge pull request #1 from KotatsuApp/master
[pull] master from KotatsuApp:master
2 years ago
Naga 016ced24e0 added pagination 2 years ago
Naga b60b2d8355 added sort by popularity/updated 2 years ago
Naga f733b85878 fix webtoon sort newest 2 years ago
Naga 1926a73dee without serialization, and added cache 2 years ago
Naga f39a9f191a cleanup 2 years ago
Naga 6d8d757798 fix manga chapters list 2 years ago
Naga 81975977ad added search support for reaperscans 2 years ago

@ -3,12 +3,18 @@ root = true
[*]
charset = utf-8
end_of_line = lf
indent_style = tab
indent_style = space
indent_size = 4
max_line_length = 120
tab_width = 4
insert_final_newline = true
trim_trailing_whitespace = true
# noinspection EditorConfigKeyCorrectness
disabled_rules = no-wildcard-imports, no-unused-imports
[{*.kt,*.kts}]
[*.{kt,kts}]
ij_kotlin_allow_trailing_comma = true
ij_kotlin_allow_trailing_comma_on_call_site = true
[*.md]
indent_size = 2
trim_trailing_whitespace = false

@ -24,6 +24,7 @@ body:
1. First step
2. Second step
3. Issue here
Please use English language
validations:
required: false

@ -1,30 +1,31 @@
name: ⭐ Feature request
description: Suggest a feature to improve a source
labels: [feature request]
labels: [ feature request ]
body:
- type: textarea
id: feature-description
attributes:
label: Describe your suggested feature
description: How can an existing source be improved?
placeholder: |
Example:
"It should work like this..."
validations:
required: true
- type: textarea
id: feature-description
attributes:
label: Describe your suggested feature
description: How can an existing source be improved?
placeholder: |
Example:
"It should work like this..."
Please use English language
validations:
required: true
- type: textarea
id: other-details
attributes:
label: Other details
placeholder: |
Additional details and attachments.
- type: textarea
id: other-details
attributes:
label: Other details
placeholder: |
Additional details and attachments.
- type: checkboxes
id: acknowledgements
attributes:
label: Acknowledgements
options:
- label: I have searched the existing issues and this is a new ticket, **NOT** a duplicate or related to another open issue.
required: true
- type: checkboxes
id: acknowledgements
attributes:
label: Acknowledgements
options:
- label: I have searched the existing issues and this is a new ticket, **NOT** a duplicate or related to another open issue.
required: true

@ -1,33 +1,31 @@
name: 🗑 Source removal request
description: Scanlators can request their site to be removed
labels: [source removal]
labels: [ source removal ]
body:
- type: input
id: link
attributes:
label: Source link
placeholder: |
Example: "https://example.org"
validations:
required: true
- type: input
id: link
attributes:
label: Source link
placeholder: |
Example: "https://example.org"
validations:
required: true
- type: textarea
id: other-details
attributes:
label: Other details
placeholder: |
Additional details and attachments.
- type: textarea
id: other-details
attributes:
label: Other details (reason for removal, etc)
placeholder: |
Additional details and attachments.
- type: checkboxes
id: requirements
attributes:
label: Requirements
description: Your request will be denied if you don't meet these requirements.
options:
- label: Proof of ownership/intent to remove sent to a Kotatsu Discord server mod via DM
required: true
- label: Site only hosts content scanlated by the group and not stolen from other scanlators or official releases (i.e., not an aggregator site)
required: true
- label: Site is not infested with user-hostile features (e.g., invasive or malicious ads)
required: true
- type: checkboxes
id: requirements
attributes:
label: Requirements
description: Your request will be denied if you don't meet these requirements.
options:
- label: Proof of ownership of the website is sent to a Kotatsu [Discord server](https://discord.gg/NNJ5RgVBC5) or [Telegram community](https://t.me/kotatsuapp)
required: true
- label: Site only hosts content scanlated by the group and not stolen from other scanlators or official releases (i.e., not an aggregator site)
required: true

@ -0,0 +1 @@
total: 1251

@ -0,0 +1,27 @@
name: Check & Test latest parsers
on:
push:
branches:
- master
jobs:
check-and-build:
runs-on: ubuntu-latest
steps:
- name: Checkout repository 🌏
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up environment 🔧
uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
java-version: '17'
distribution: 'temurin'
- name: Set up Gradle 📦
uses: gradle/actions/setup-gradle@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2
with:
cache-read-only: true
- name: Compile parsers 🚀
run: ./gradlew compileKotlin

@ -1,4 +1,4 @@
name: Parsers test
name: Parsers test for PRs
on:
workflow_dispatch:
@ -13,10 +13,19 @@ jobs:
build-and-test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-java@v3
with:
java-version: '11'
- name: Checkout repository 🌏
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up environment 🔧
uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
java-version: '17'
distribution: 'temurin'
cache: 'gradle'
- run: ./gradlew assemble
- name: Set up Gradle 📦
uses: gradle/actions/setup-gradle@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2
with:
cache-read-only: true
- name: Compile parsers 🚀
run: ./gradlew compileKotlin

13
.gitignore vendored

@ -17,6 +17,7 @@
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
.idea/**/Project_Default.xml
# Gradle
.idea/**/gradle.xml
@ -26,6 +27,8 @@
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
.idea/deviceManager.xml
.idea/.name
.idea/artifacts
.idea/compiler.xml
.idea/jarRepositories.xml
@ -71,11 +74,21 @@ fabric.properties
.gradle/
build/
bin/
.idea/**/misc.xml
.idea/**/vcs.xml
.idea/**/ktlint.xml
.idea/codeStyles/
.idea/kotlinc.xml
src/test/resources/cookies.txt
local.properties
.kotlin/
!/.idea/kotlin-statistics.xml
.idea/**/discord.xml
.idea/**/migrations.xml
.idea/**/runConfigurations.xml
.idea/**/AndroidProjectSystem.xml
.idea/caches/deviceStreaming.xml

5
.idea/.gitignore vendored

@ -1,3 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# GitHub Copilot persisted chat sessions
/copilot/chatSessions
.name
deviceManager.xml

@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="KotlinJpsPluginSettings">
<option name="version" value="1.9.22" />
</component>
</project>

@ -1,10 +1,10 @@
# Contributing
The following is guide for creating a Kotatsu parsers. Thanks for taking the time to contribute!
The following is a guide for creating Kotatsu parsers. Thanks for taking the time to contribute!
## Prerequisites
Before you start, please note that the ability to use following technologies is **required**.
Before you start, please note that the ability to use the following technologies is **required**.
- Basic [Android development](https://developer.android.com/)
- [Kotlin](https://kotlinlang.org/)
@ -16,24 +16,24 @@ Before you start, please note that the ability to use following technologies is
- [IntelliJ IDEA](https://www.jetbrains.com/idea/) (Community edition is enough)
- Android device (or emulator)
Kotatsu parsers is not a part of Android application, but you can easily develop and test it directly inside an Android
application project and relocate it to the library project when done.
Kotatsu parsers are not a part of the Android application, but you can easily develop and test it directly inside an
Android application project and relocate it to the library project when done.
### Before you start
First, take a look at `kotatsu-parsers` project structure. Each parser is a single class that
extends `MangaParser` class and have a `MangaSourceParser` annotation.
Also pay attention on extensions in `util` package. For example, extensions from `Jsoup` file
First, take a look at the `kotatsu-parsers` project structure. Each parser is a single class that
extends the `MangaParser` class and has a `MangaSourceParser` annotation.
Also, pay attention to extensions in the `util` package. For example, extensions from the `Jsoup` file
should be used instead of existing JSoup functions because they have better nullability support
and improved error messages.
## Writing your parser
So, you want to create a parser, that will provide access to manga from a website.
First, you should explore a website for API availability.
First, you should explore a website to learn about API availability.
If it does not contain any documentation about
API, [explore network requests](https://firefox-source-docs.mozilla.org/devtools-user/):
some websites use ajax.
some websites use AJAX.
- [Example](https://github.com/KotatsuApp/kotatsu-parsers/blob/master/src/main/kotlin/org/koitharu/kotatsu/parsers/site/ru/DesuMeParser.kt)
of Json API usage.
@ -42,43 +42,48 @@ some websites use ajax.
- [Example](https://github.com/KotatsuApp/kotatsu-parsers/blob/master/src/main/kotlin/org/koitharu/kotatsu/parsers/site/en/MangaTownParser.kt)
of pure HTML parsing.
If website is based on some engine it is rationally to use common base class for this one (for example, Madara wordress
theme
and the `MadaraParser` class)
If the website is based on some common engine, it is rational to use a shared base class for it (for example, the Madara
WordPress theme and the `MadaraParser` class)
### Parser class skeleton
Parser class must have exactly one primary constructor parameter of type `MangaLoaderContext` and have an
`MangaSourceParser` annotation that provides internal name, title and language of a manga source.
All functions in `MangaParser` class are documented. Pay attention to some peculiarities:
- Never hardcode domain. Specify default domain in `configKeyDomain` field and obtain an actual one using `getDomain()`.
- All ids must be unique and domain-independent. Use `generateUid` functions with relative url or some internal id which
is unique across the manga source.
- `sortOrders` set should not be empty. If your source is not support sorting, specify one most relevance value.
- If you cannot obtain direct links to pages images inside `getPages` method, it is ok to use an intermediate url
as `Page.url` and fetch a direct link at `getPageUrl` function.
- You can use _asserts_ to check some optional fields. For example. `Manga.author` field is not required, but if your
source provide such information, add `assert(it != null)`. This will not have any effect on production but help to
find issues during unit testing.
- If your source website (or it's api) uses pages for pagination instead of offset you should extend `PagedMangaParser`
instead of `MangaParser`.
- Your parser may also implement the `Interceptor` interface for additional manipulation of all network requests and/or
The parser class must have exactly one primary constructor parameter of type `MangaLoaderContext` and have an
`MangaSourceParser` annotation that provides the internal name, title, and language of a manga source.
All members of the `MangaParser` class are documented. Pay attention to some peculiarities:
- Never hardcode domain. Specify the default domain in the `configKeyDomain` field and obtain an actual one using
`domain`.
- All IDs must be unique and domain-independent. Use `generateUid` functions with a relative URL or some internal id
that is unique across the manga source.
- The `availableSortOrders` set should not be empty. If your source does not support sorting, specify the single most
relevant value.
- If you cannot obtain direct links to page images inside the `getPages` method, it is ok to use an intermediate URL
as `Page.url` and fetch a direct link in the `getPageUrl` function.
- You can use _asserts_ to check some optional fields. For example, the `Manga.author` field is not required, but if
your source provides this information, add `assert(it != null)`. This will not have any effect on production but help
to find issues during unit testing.
- Your parser may also implement the `Interceptor` interface for additional manipulation of all network requests and
responses, including image loading.
- If your source website (or its API) uses pages for pagination instead of offset you should extend `PagedMangaParser`
instead of `MangaParser`.
- If your source website (or its API) does not provide pagination (has only one page of content) you should extend
`SinglePageMangaParser` instead of `MangaParser` or `PagedMangaParser`.
![parser_classes.png](docs/parser_classes.png)
## Development process
During the development it is recommended (but not necessary) to write it directly
in the Kotatsu android application project. You can use `core.parser.DummyParser` class as a sandbox. `Dummy` manga
source is available in debug Kotatsu build.
During the development, it is recommended (but not necessary) to write it directly
in the Kotatsu Android application project. You can use the `core.parser.DummyParser` class as a sandbox. The `Dummy`
manga source is available in the debug Kotatsu build.
Once parser is ready you can relocate your code into `kotatsu-parsers` library project in a `site` package and create a
Pull Request.
Once the parser is ready you can relocate your code into the `kotatsu-parsers` library project in a `site` package and
create a Pull Request.
### Testing
It is recommended to run unit tests before submitting a PR.
It is recommended that unit tests be run before submitting a PR.
- Temporary modify the `MangaSources` annotation class: specify your parser(s) name(s) and change mode
to `EnumSource.Mode.INCLUDE`
@ -87,5 +92,5 @@ It is recommended to run unit tests before submitting a PR.
## Help
If you need a help or have some questions, ask a community in our [Telegram chat](https://t.me/kotatsuapp)
If you need help or have some questions, ask the community in our [Telegram chat](https://t.me/kotatsuapp)
or [Discord server](https://discord.gg/NNJ5RgVBC5).

@ -1,8 +1,9 @@
# Kotatsu parsers
This library provides manga sources.
This library provides a collection of manga parsers for convenient access to manga available on the web. It can be used in
JVM and Android applications.
[![](https://jitpack.io/v/KotatsuApp/kotatsu-parsers.svg)](https://jitpack.io/#KotatsuApp/kotatsu-parsers) ![Kotlin](https://img.shields.io/github/languages/top/KotatsuApp/kotatsu-parsers) ![License](https://img.shields.io/github/license/KotatsuApp/Kotatsu) [![Telegram](https://img.shields.io/badge/chat-telegram-60ACFF)](https://t.me/kotatsuapp) [![Discord](https://img.shields.io/discord/898363402467045416?color=5865f2&label=discord)](https://discord.gg/NNJ5RgVBC5)
![Sources count](https://img.shields.io/badge/dynamic/yaml?url=https%3A%2F%2Fraw.githubusercontent.com%2FKotatsuApp%2Fkotatsu-parsers%2Frefs%2Fheads%2Fmaster%2F.github%2Fsummary.yaml&query=total&label=manga%20sources&color=%23E9321C) [![](https://jitpack.io/v/KotatsuApp/kotatsu-parsers.svg)](https://jitpack.io/#KotatsuApp/kotatsu-parsers) ![License](https://img.shields.io/github/license/KotatsuApp/Kotatsu) [![Telegram](https://img.shields.io/badge/chat-telegram-60ACFF)](https://t.me/kotatsuapp) [![Discord](https://img.shields.io/discord/898363402467045416?color=5865f2&label=discord)](https://discord.gg/NNJ5RgVBC5)
## Usage
@ -46,16 +47,22 @@ This library provides manga sources.
3. Usage in code
```kotlin
val parser = mangaLoaderContext.newParserInstance(MangaSource.MANGADEX)
val parser = mangaLoaderContext.newParserInstance(MangaParserSource.MANGADEX)
```
`mangaLoaderContext` is an implementation of the `MangaLoaderContext` class.
See examples
of [Android](https://github.com/KotatsuApp/Kotatsu/blob/devel/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/MangaLoaderContextImpl.kt)
and [Non-Android](https://github.com/KotatsuApp/kotatsu-dl/blob/master/src/jvmMain/kotlin/org/koitharu/kotatsu_dl/logic/MangaLoaderContextImpl.kt)
and [Non-Android](https://github.com/KotatsuApp/kotatsu-dl/blob/master/src/main/kotlin/org/koitharu/kotatsu/dl/parsers/MangaLoaderContextImpl.kt)
implementation.
Note that the `MangaSource.LOCAL` and `MangaSource.DUMMY` parsers cannot be instantiated.
## Projects that use the library
- [Kotatsu](https://github.com/KotatsuApp/Kotatsu)
- [Doki](https://github.com/DokiTeam/Doki)
- [kotatsu-dl](https://github.com/KotatsuApp/kotatsu-dl)
- [Shirizu (WIP)](https://github.com/ztimms73/shirizu)
- [OtakuWorld](https://github.com/jakepurple13/OtakuWorld)
## Contribution

@ -1,72 +0,0 @@
import tasks.ReportGenerateTask
plugins {
id 'java-library'
id 'org.jetbrains.kotlin.jvm' version '1.9.22'
id 'com.google.devtools.ksp' version '1.9.22-1.0.17'
id 'maven-publish'
}
group = 'org.koitharu'
version = '1.0'
test {
useJUnitPlatform()
}
compileKotlin {
kotlinOptions {
freeCompilerArgs += [
'-opt-in=kotlin.RequiresOptIn',
'-opt-in=kotlin.contracts.ExperimentalContracts',
'-opt-in=kotlinx.coroutines.ExperimentalCoroutinesApi',
'-opt-in=org.koitharu.kotatsu.parsers.InternalParsersApi',
]
}
}
compileTestKotlin {
kotlinOptions {
freeCompilerArgs += [
'-opt-in=kotlin.RequiresOptIn',
'-opt-in=kotlinx.coroutines.ExperimentalCoroutinesApi',
'-opt-in=org.koitharu.kotatsu.parsers.InternalParsersApi',
]
}
}
kotlin {
jvmToolchain(8)
sourceSets {
main.kotlin.srcDirs += 'build/generated/ksp/main/kotlin'
}
}
afterEvaluate {
publishing {
publications {
mavenJava(MavenPublication) {
from components.java
}
}
}
}
dependencies {
implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.3'
implementation 'com.squareup.okhttp3:okhttp:4.12.0'
implementation 'com.squareup.okio:okio:3.7.0'
api 'org.jsoup:jsoup:1.17.2'
implementation 'org.json:json:20231013'
implementation 'androidx.collection:collection:1.4.0'
ksp project(':kotatsu-parsers-ksp')
testImplementation 'org.junit.jupiter:junit-jupiter-api:5.10.1'
testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.10.1'
testImplementation 'org.junit.jupiter:junit-jupiter-params:5.10.1'
testImplementation 'org.jetbrains.kotlinx:kotlinx-coroutines-test:1.7.3'
testImplementation 'io.webfolder:quickjs:1.1.0'
}
tasks.register('generateTestsReport', ReportGenerateTask)

@ -0,0 +1,63 @@
// Root build script for the kotatsu-parsers library (Gradle Kotlin DSL).
// Dependency coordinates and plugin versions are resolved from the version
// catalog in gradle/libs.versions.toml via the generated `libs` accessor.
import tasks.ReportGenerateTask
plugins {
`java-library`
`maven-publish`
alias(libs.plugins.kotlin.jvm)
alias(libs.plugins.ksp)
}
group = "org.koitharu"
version = "1.0"
// Run unit tests on the JUnit 5 (Jupiter) platform.
tasks.test {
useJUnitPlatform()
}
// Option passed to the KSP processor; the output directory matches the
// .github/summary.yaml consumed by the README sources-count badge.
ksp {
arg("summaryOutputDir", "${projectDir}/.github")
}
// Apply the same opt-in compiler flags to every Kotlin compilation
// (main and test alike), replacing the per-task blocks of the old Groovy script.
tasks.withType<org.jetbrains.kotlin.gradle.tasks.KotlinCompile>().configureEach {
compilerOptions {
freeCompilerArgs.addAll(
"-opt-in=kotlin.RequiresOptIn",
"-opt-in=kotlin.contracts.ExperimentalContracts",
"-opt-in=kotlinx.coroutines.ExperimentalCoroutinesApi",
"-opt-in=org.koitharu.kotatsu.parsers.InternalParsersApi",
)
}
}
kotlin {
// Compile against a Java 8 toolchain.
jvmToolchain(8)
// Explicit API mode at warning level: missing visibility modifiers or
// public return types are reported but do not fail the build.
explicitApiWarning()
// Include KSP-generated sources in the main source set.
sourceSets["main"].kotlin.srcDirs("build/generated/ksp/main/kotlin")
}
// Publish the java component as a Maven publication (consumed e.g. via JitPack).
publishing {
publications {
create<MavenPublication>("mavenJava") {
from(components["java"])
}
}
}
dependencies {
implementation(libs.kotlinx.coroutines.core)
implementation(libs.okhttp)
implementation(libs.okio)
implementation(libs.json)
implementation(libs.androidx.collection)
// Jsoup is `api` because parser subclasses expose/consume Jsoup types directly.
api(libs.jsoup)
// Project-local KSP processor (see the :kotatsu-parsers-ksp module).
ksp(project(":kotatsu-parsers-ksp"))
testImplementation(libs.junit.api)
testImplementation(libs.junit.engine)
testImplementation(libs.junit.params)
testImplementation(libs.kotlinx.coroutines.test)
testImplementation(libs.quickjs)
}
// Custom task (defined in buildSrc) that aggregates parser test results into a report.
tasks.register<ReportGenerateTask>("generateTestsReport")

@ -1,18 +0,0 @@
plugins {
id 'org.jetbrains.kotlin.jvm' version '1.9.22'
}
repositories {
mavenCentral()
}
kotlin {
jvmToolchain(8)
}
dependencies {
implementation gradleApi()
implementation 'org.simpleframework:simple-xml:2.7.1'
implementation 'com.soywiz.korlibs.korte:korte-jvm:4.0.10'
implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.3'
}

@ -0,0 +1,18 @@
// Build script for the build-logic project providing custom Gradle tasks
// (e.g. ReportGenerateTask used by the root build script).
plugins {
kotlin("jvm") version "2.2.10"
}
repositories {
mavenCentral()
}
kotlin {
// Same Java 8 toolchain as the main library build.
jvmToolchain(8)
}
dependencies {
// Gradle API is required to implement custom task types.
implementation(gradleApi())
// XML binding library — presumably used to read test/report XML; confirm in task sources.
implementation("org.simpleframework:simple-xml:2.7.1")
// Korte template engine — presumably renders the generated report; confirm in task sources.
implementation("com.soywiz.korlibs.korte:korte-jvm:4.0.10")
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.10.2")
}

@ -1,5 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-bin.zip
distributionSha256Sum=bd71102213493060956ec229d946beee57158dbd89d0e62b91bca0fa2c5f3531
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

@ -0,0 +1,804 @@
<?xml version="1.0" encoding="utf-8"?>
<gaphor xmlns="https://gaphor.org/model" xmlns:Core="https://gaphor.org/modelinglanguage/Core" xmlns:UML="https://gaphor.org/modelinglanguage/UML" xmlns:general="https://gaphor.org/modelinglanguage/general" version="4" gaphor-version="3.1.0">
<model>
<Core:StyleSheet id="58d6989a-66f8-11ec-b4c8-0456e5e540ed"/>
<UML:Package id="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed">
<name>
<val>Новая модель</val>
</name>
<ownedDiagram>
<reflist>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</reflist>
</ownedDiagram>
<ownedType>
<reflist>
<ref refid="0b54a350-f59d-11ef-bfb1-4cbb5880a0b8"/>
<ref refid="a300f58a-f5bd-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="ad4c68d0-f5bd-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="198a3108-f5be-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="32081654-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</ownedType>
<packagedElement>
<reflist>
<ref refid="0b54a350-f59d-11ef-bfb1-4cbb5880a0b8"/>
<ref refid="a300f58a-f5bd-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="ad4c68d0-f5bd-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="198a3108-f5be-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="32081654-f5bf-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="25a17c58-f5be-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="41318a02-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</packagedElement>
</UML:Package>
<UML:Diagram id="58d6c536-66f8-11ec-b4c8-0456e5e540ed">
<element>
<ref refid="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed"/>
</element>
<name>
<val>Новая диаграмма</val>
</name>
<ownedPresentation>
<reflist>
<ref refid="0b54edc4-f59d-11ef-bfb1-4cbb5880a0b8"/>
<ref refid="a3018fc2-f5bd-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="ad4d2aae-f5bd-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="198aace6-f5be-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="320868c0-f5bf-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="b760bcd4-f5bf-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="bff32eae-f5bf-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="d5113ca4-f5bf-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="e8042ad8-f5bf-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="531831f2-f5c0-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="5ccae8d4-f5c0-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="b6e0240e-f5bd-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="254e40f6-f5be-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="2bb6e87a-f5bf-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="40e36d2c-f5bf-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="bcc07c64-f5bf-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="f6b48e4c-f5bf-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="f89ba010-f5bf-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="fabe0540-f5bf-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="6f993af6-f5c0-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</ownedPresentation>
</UML:Diagram>
<UML:Class id="0b54a350-f59d-11ef-bfb1-4cbb5880a0b8">
<clientDependency>
<reflist>
<ref refid="25a17c58-f5be-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</clientDependency>
<comment>
<reflist>
<ref refid="bff31afe-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</comment>
<isAbstract>
<val>1</val>
</isAbstract>
<name>
<val>AbstractMangaParser</val>
</name>
<owningPackage>
<ref refid="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed"/>
</owningPackage>
<package>
<ref refid="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed"/>
</package>
<presentation>
<reflist>
<ref refid="0b54edc4-f59d-11ef-bfb1-4cbb5880a0b8"/>
</reflist>
</presentation>
<specialization>
<reflist>
<ref refid="b969dac6-f5bd-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="2c236356-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</specialization>
</UML:Class>
<UML:ClassItem id="0b54edc4-f59d-11ef-bfb1-4cbb5880a0b8">
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 405.16796875, 388.8671875)</val>
</matrix>
<top-left>
<val>(0.0, 0.0)</val>
</top-left>
<width>
<val>158.0</val>
</width>
<height>
<val>60.0</val>
</height>
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<show_attributes>
<val>0</val>
</show_attributes>
<show_operations>
<val>0</val>
</show_operations>
<subject>
<ref refid="0b54a350-f59d-11ef-bfb1-4cbb5880a0b8"/>
</subject>
</UML:ClassItem>
<UML:Class id="a300f58a-f5bd-11ef-9ec2-4cbb5880a0b8">
<comment>
<reflist>
<ref refid="d5112a70-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</comment>
<generalization>
<reflist>
<ref refid="b969dac6-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</generalization>
<isAbstract>
<val>1</val>
</isAbstract>
<name>
<val>PagedMangaParser</val>
</name>
<owningPackage>
<ref refid="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed"/>
</owningPackage>
<package>
<ref refid="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed"/>
</package>
<presentation>
<reflist>
<ref refid="a3018fc2-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</presentation>
</UML:Class>
<UML:ClassItem id="a3018fc2-f5bd-11ef-9ec2-4cbb5880a0b8">
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 476.3368367667698, 525.76953125)</val>
</matrix>
<top-left>
<val>(0.0, 0.0)</val>
</top-left>
<width>
<val>142.0</val>
</width>
<height>
<val>60.0</val>
</height>
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<show_attributes>
<val>0</val>
</show_attributes>
<show_operations>
<val>0</val>
</show_operations>
<subject>
<ref refid="a300f58a-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</subject>
</UML:ClassItem>
<UML:Class id="ad4c68d0-f5bd-11ef-9ec2-4cbb5880a0b8">
<comment>
<reflist>
<ref refid="e80418f4-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</comment>
<generalization>
<reflist>
<ref refid="2c236356-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</generalization>
<isAbstract>
<val>1</val>
</isAbstract>
<name>
<val>SinglePageMangaParser</val>
</name>
<owningPackage>
<ref refid="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed"/>
</owningPackage>
<package>
<ref refid="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed"/>
</package>
<presentation>
<reflist>
<ref refid="ad4d2aae-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</presentation>
</UML:Class>
<UML:ClassItem id="ad4d2aae-f5bd-11ef-9ec2-4cbb5880a0b8">
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 405.16796875, 627.46875)</val>
</matrix>
<top-left>
<val>(0.0, 0.0)</val>
</top-left>
<width>
<val>175.0</val>
</width>
<height>
<val>60.0</val>
</height>
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<show_attributes>
<val>0</val>
</show_attributes>
<show_operations>
<val>0</val>
</show_operations>
<subject>
<ref refid="ad4c68d0-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</subject>
</UML:ClassItem>
<UML:GeneralizationItem id="b6e0240e-f5bd-11ef-9ec2-4cbb5880a0b8">
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<horizontal>
<val>False</val>
</horizontal>
<orthogonal>
<val>False</val>
</orthogonal>
<subject>
<ref refid="b969dac6-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</subject>
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 499.2109069824219, 463.45703125)</val>
</matrix>
<points>
<val>[(28.486861756586336, 62.3125), (25.111328125, -14.58984375)]</val>
</points>
<head-connection>
<ref refid="a3018fc2-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</head-connection>
<tail-connection>
<ref refid="0b54edc4-f59d-11ef-bfb1-4cbb5880a0b8"/>
</tail-connection>
</UML:GeneralizationItem>
<UML:Generalization id="b969dac6-f5bd-11ef-9ec2-4cbb5880a0b8">
<general>
<ref refid="0b54a350-f59d-11ef-bfb1-4cbb5880a0b8"/>
</general>
<presentation>
<reflist>
<ref refid="b6e0240e-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</presentation>
<specific>
<ref refid="a300f58a-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</specific>
</UML:Generalization>
<UML:Interface id="198a3108-f5be-11ef-9ec2-4cbb5880a0b8">
<name>
<val>MangaParser</val>
</name>
<owningPackage>
<ref refid="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed"/>
</owningPackage>
<package>
<ref refid="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed"/>
</package>
<presentation>
<reflist>
<ref refid="198aace6-f5be-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</presentation>
<supplierDependency>
<reflist>
<ref refid="25a17c58-f5be-11ef-9ec2-4cbb5880a0b8"/>
<ref refid="41318a02-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</supplierDependency>
</UML:Interface>
<UML:InterfaceItem id="198aace6-f5be-11ef-9ec2-4cbb5880a0b8">
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 278.00391387939453, 232.92578125)</val>
</matrix>
<top-left>
<val>(0.0, 0.0)</val>
</top-left>
<width>
<val>105.0</val>
</width>
<height>
<val>80.0</val>
</height>
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<show_attributes>
<val>0</val>
</show_attributes>
<show_operations>
<val>0</val>
</show_operations>
<subject>
<ref refid="198a3108-f5be-11ef-9ec2-4cbb5880a0b8"/>
</subject>
<folded>
<val>0</val>
</folded>
</UML:InterfaceItem>
<UML:InterfaceRealizationItem id="254e40f6-f5be-11ef-9ec2-4cbb5880a0b8">
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<horizontal>
<val>False</val>
</horizontal>
<orthogonal>
<val>False</val>
</orthogonal>
<subject>
<ref refid="25a17c58-f5be-11ef-9ec2-4cbb5880a0b8"/>
</subject>
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 306.1445007324219, 270.0625)</val>
</matrix>
<points>
<val>[(55.866059373910275, 42.86328125), (164.5765002560883, 118.8046875)]</val>
</points>
<head-connection>
<ref refid="198aace6-f5be-11ef-9ec2-4cbb5880a0b8"/>
</head-connection>
<tail-connection>
<ref refid="0b54edc4-f59d-11ef-bfb1-4cbb5880a0b8"/>
</tail-connection>
</UML:InterfaceRealizationItem>
<UML:InterfaceRealization id="25a17c58-f5be-11ef-9ec2-4cbb5880a0b8">
<client>
<reflist>
<ref refid="0b54a350-f59d-11ef-bfb1-4cbb5880a0b8"/>
</reflist>
</client>
<owningPackage>
<ref refid="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed"/>
</owningPackage>
<presentation>
<reflist>
<ref refid="254e40f6-f5be-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</presentation>
<supplier>
<reflist>
<ref refid="198a3108-f5be-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</supplier>
</UML:InterfaceRealization>
<UML:GeneralizationItem id="2bb6e87a-f5bf-11ef-9ec2-4cbb5880a0b8">
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<horizontal>
<val>False</val>
</horizontal>
<orthogonal>
<val>False</val>
</orthogonal>
<subject>
<ref refid="2c236356-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</subject>
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 436.2929382324219, 439.1913757324219)</val>
</matrix>
<points>
<val>[(20.37646032737257, 188.27737426757812), (18.488327026367188, 9.675811767578125)]</val>
</points>
<head-connection>
<ref refid="ad4d2aae-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</head-connection>
<tail-connection>
<ref refid="0b54edc4-f59d-11ef-bfb1-4cbb5880a0b8"/>
</tail-connection>
</UML:GeneralizationItem>
<UML:Generalization id="2c236356-f5bf-11ef-9ec2-4cbb5880a0b8">
<general>
<ref refid="0b54a350-f59d-11ef-bfb1-4cbb5880a0b8"/>
</general>
<presentation>
<reflist>
<ref refid="2bb6e87a-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</presentation>
<specific>
<ref refid="ad4c68d0-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</specific>
</UML:Generalization>
<UML:Class id="32081654-f5bf-11ef-9ec2-4cbb5880a0b8">
<clientDependency>
<reflist>
<ref refid="41318a02-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</clientDependency>
<name>
<val>MangaParserWrapper</val>
</name>
<note>
<val></val>
</note>
<owningPackage>
<ref refid="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed"/>
</owningPackage>
<package>
<ref refid="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed"/>
</package>
<presentation>
<reflist>
<ref refid="320868c0-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</presentation>
</UML:Class>
<UML:ClassItem id="320868c0-f5bf-11ef-9ec2-4cbb5880a0b8">
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 128.5008992667698, 410.48990205860804)</val>
</matrix>
<top-left>
<val>(0.0, 0.0)</val>
</top-left>
<width>
<val>158.0</val>
</width>
<height>
<val>60.0</val>
</height>
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<show_attributes>
<val>0</val>
</show_attributes>
<show_operations>
<val>0</val>
</show_operations>
<subject>
<ref refid="32081654-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</subject>
</UML:ClassItem>
<UML:InterfaceRealizationItem id="40e36d2c-f5bf-11ef-9ec2-4cbb5880a0b8">
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<horizontal>
<val>False</val>
</horizontal>
<orthogonal>
<val>False</val>
</orthogonal>
<subject>
<ref refid="41318a02-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</subject>
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 306.0585632324219, 249.69920349121094)</val>
</matrix>
<points>
<val>[(11.759223915218172, 63.22657775878906), (-98.55766396565207, 160.7906985673971)]</val>
</points>
<head-connection>
<ref refid="198aace6-f5be-11ef-9ec2-4cbb5880a0b8"/>
</head-connection>
<tail-connection>
<ref refid="320868c0-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</tail-connection>
</UML:InterfaceRealizationItem>
<UML:InterfaceRealization id="41318a02-f5bf-11ef-9ec2-4cbb5880a0b8">
<client>
<reflist>
<ref refid="32081654-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</client>
<owningPackage>
<ref refid="58d6c2e8-66f8-11ec-b4c8-0456e5e540ed"/>
</owningPackage>
<presentation>
<reflist>
<ref refid="40e36d2c-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</presentation>
<supplier>
<reflist>
<ref refid="198a3108-f5be-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</supplier>
</UML:InterfaceRealization>
<UML:Comment id="b760ac44-f5bf-11ef-9ec2-4cbb5880a0b8">
<body>
<val>Used for providing external api. Do not use this class directly</val>
</body>
<presentation>
<reflist>
<ref refid="b760bcd4-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</presentation>
</UML:Comment>
<UML:CommentItem id="b760bcd4-f5bf-11ef-9ec2-4cbb5880a0b8">
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 108.0561294327963, 550.1347579956054)</val>
</matrix>
<top-left>
<val>(0.0, 0.0)</val>
</top-left>
<width>
<val>183.21868896484375</val>
</width>
<height>
<val>91.23829650878906</val>
</height>
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<subject>
<ref refid="b760ac44-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</subject>
</UML:CommentItem>
<UML:CommentLineItem id="bcc07c64-f5bf-11ef-9ec2-4cbb5880a0b8">
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<horizontal>
<val>False</val>
</horizontal>
<orthogonal>
<val>False</val>
</orthogonal>
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 549.205520203852, 278.05499559311954)</val>
</matrix>
<points>
<val>[(-349.5400462886338, 192.4349064654885), (-349.5400462886338, 272.0797624024858)]</val>
</points>
<tail-connection>
<ref refid="b760bcd4-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</tail-connection>
</UML:CommentLineItem>
<UML:Comment id="bff31afe-f5bf-11ef-9ec2-4cbb5880a0b8">
<annotatedElement>
<reflist>
<ref refid="0b54a350-f59d-11ef-bfb1-4cbb5880a0b8"/>
</reflist>
</annotatedElement>
<body>
<val>Extend this class if your manga source provides standard limit-offset based lists (get manga list by offset)</val>
</body>
<presentation>
<reflist>
<ref refid="bff32eae-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</presentation>
</UML:Comment>
<UML:CommentItem id="bff32eae-f5bf-11ef-9ec2-4cbb5880a0b8">
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 673.0610499890082, 367.0515553989646)</val>
</matrix>
<top-left>
<val>(0.0, 0.0)</val>
</top-left>
<width>
<val>228.8028016098773</val>
</width>
<height>
<val>88.0</val>
</height>
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<subject>
<ref refid="bff31afe-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</subject>
</UML:CommentItem>
<UML:Comment id="d5112a70-f5bf-11ef-9ec2-4cbb5880a0b8">
<annotatedElement>
<reflist>
<ref refid="a300f58a-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</annotatedElement>
<body>
<val>Extend this class if your manga source provides paged-based lists (get manga list by page number)</val>
</body>
<presentation>
<reflist>
<ref refid="d5113ca4-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</presentation>
</UML:Comment>
<UML:CommentItem id="d5113ca4-f5bf-11ef-9ec2-4cbb5880a0b8">
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 666.7924311664914, 507.7539062499999)</val>
</matrix>
<top-left>
<val>(0.0, 0.0)</val>
</top-left>
<width>
<val>214.34368896484375</val>
</width>
<height>
<val>88.0</val>
</height>
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<subject>
<ref refid="d5112a70-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</subject>
</UML:CommentItem>
<UML:Comment id="e80418f4-f5bf-11ef-9ec2-4cbb5880a0b8">
<annotatedElement>
<reflist>
<ref refid="ad4c68d0-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</annotatedElement>
<body>
<val>Extend this class if your manga source does not provide pagination (all manga provided in one list)</val>
</body>
<presentation>
<reflist>
<ref refid="e8042ad8-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</presentation>
</UML:Comment>
<UML:CommentItem id="e8042ad8-f5bf-11ef-9ec2-4cbb5880a0b8">
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 666.7924311664914, 560.9671898788581)</val>
</matrix>
<top-left>
<val>(0.0, 58.00435704705592)</val>
</top-left>
<width>
<val>263.9307954323941</val>
</width>
<height>
<val>78.01706672440287</val>
</height>
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<subject>
<ref refid="e80418f4-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</subject>
</UML:CommentItem>
<UML:CommentLineItem id="f6b48e4c-f5bf-11ef-9ec2-4cbb5880a0b8">
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<horizontal>
<val>False</val>
</horizontal>
<orthogonal>
<val>False</val>
</orthogonal>
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 561.8951626340418, 549.6101338901756)</val>
</matrix>
<points>
<val>[(56.44167413272805, 7.038279316310902), (104.89726853244963, 8.304008355003589)]</val>
</points>
<head-connection>
<ref refid="a3018fc2-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</head-connection>
<tail-connection>
<ref refid="d5113ca4-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</tail-connection>
</UML:CommentLineItem>
<UML:CommentLineItem id="f89ba010-f5bf-11ef-9ec2-4cbb5880a0b8">
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<horizontal>
<val>False</val>
</horizontal>
<orthogonal>
<val>False</val>
</orthogonal>
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 559.3873501340418, 413.0007588901755)</val>
</matrix>
<points>
<val>[(3.7806186159582467, 0.0), (113.67369985496646, 1.6012844908540842)]</val>
</points>
<head-connection>
<ref refid="0b54edc4-f59d-11ef-bfb1-4cbb5880a0b8"/>
</head-connection>
<tail-connection>
<ref refid="bff32eae-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</tail-connection>
</UML:CommentLineItem>
<UML:CommentLineItem id="fabe0540-f5bf-11ef-9ec2-4cbb5880a0b8">
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<horizontal>
<val>False</val>
</horizontal>
<orthogonal>
<val>False</val>
</orthogonal>
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 522.3600063840418, 652.6882588901756)</val>
</matrix>
<points>
<val>[(57.80796236595825, 5.29182139794003), (144.43242478244963, 5.657840086725969)]</val>
</points>
<head-connection>
<ref refid="ad4d2aae-f5bd-11ef-9ec2-4cbb5880a0b8"/>
</head-connection>
<tail-connection>
<ref refid="e8042ad8-f5bf-11ef-9ec2-4cbb5880a0b8"/>
</tail-connection>
</UML:CommentLineItem>
<general:Box id="531831f2-f5c0-11ef-9ec2-4cbb5880a0b8">
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 375.05802564891326, 349.05453145170736)</val>
</matrix>
<top-left>
<val>(0.0, 3.15625)</val>
</top-left>
<width>
<val>590.6594026101285</val>
</width>
<height>
<val>368.44140625</val>
</height>
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
</general:Box>
<UML:Comment id="5ccad4ca-f5c0-11ef-9ec2-4cbb5880a0b8">
<body>
<val>To create your own parser you have to extends one of these classes</val>
</body>
<presentation>
<reflist>
<ref refid="5ccae8d4-f5c0-11ef-9ec2-4cbb5880a0b8"/>
</reflist>
</presentation>
</UML:Comment>
<UML:CommentItem id="5ccae8d4-f5c0-11ef-9ec2-4cbb5880a0b8">
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 756.725301794198, 225.57697659840966)</val>
</matrix>
<top-left>
<val>(0.0, 0.0)</val>
</top-left>
<width>
<val>208.99212646484375</val>
</width>
<height>
<val>73.47482464883183</val>
</height>
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<subject>
<ref refid="5ccad4ca-f5c0-11ef-9ec2-4cbb5880a0b8"/>
</subject>
</UML:CommentItem>
<UML:CommentLineItem id="6f993af6-f5c0-11ef-9ec2-4cbb5880a0b8">
<diagram>
<ref refid="58d6c536-66f8-11ec-b4c8-0456e5e540ed"/>
</diagram>
<horizontal>
<val>False</val>
</horizontal>
<orthogonal>
<val>False</val>
</orthogonal>
<matrix>
<val>(1.0, 0.0, 0.0, 1.0, 943.6141683885666, 419.2772168262177)</val>
</matrix>
<points>
<val>[(-27.404961772030788, -67.06643537451032), (-27.404961772030788, -120.2254155789762)]</val>
</points>
<head-connection>
<ref refid="531831f2-f5c0-11ef-9ec2-4cbb5880a0b8"/>
</head-connection>
<tail-connection>
<ref refid="5ccae8d4-f5c0-11ef-9ec2-4cbb5880a0b8"/>
</tail-connection>
</UML:CommentLineItem>
</model>
</gaphor>

Binary file not shown.

After

Width:  |  Height:  |  Size: 41 KiB

@ -1,2 +1,9 @@
# Following this blog:
# https://proandroiddev.com/how-we-reduced-our-gradle-build-times-by-over-80-51f2b6d6b05b
kotlin.code.style=official
org.gradle.jvmargs=-Xmx4g -XX:MaxMetaspaceSize=4096m
org.gradle.jvmargs=-Xmx4g -XX:MaxMetaspaceSize=4096m -XX:+UseParallelGC
org.gradle.vfs.watch=true
org.gradle.configureondemand=true
org.gradle.parallel=true
org.gradle.caching=true
org.gradle.unsafe.configuration-cache=true

@ -0,0 +1,29 @@
[versions]
kotlin = "2.2.10"
ksp = "2.2.10-2.0.2"
coroutines = "1.10.2"
junit = "5.10.1"
okhttp = "5.1.0"
okio = "3.16.0"
json = "20240303"
androidx-collection = "1.5.0"
jsoup = "1.21.2"
quickjs = "1.1.0"
[plugins]
kotlin-jvm = { id = "org.jetbrains.kotlin.jvm", version.ref = "kotlin" }
ksp = { id = "com.google.devtools.ksp", version.ref = "ksp" }
[libraries]
ksp-symbol-processing-api = { module = "com.google.devtools.ksp:symbol-processing-api", version.ref = "ksp" }
kotlinx-coroutines-core = { group = "org.jetbrains.kotlinx", name = "kotlinx-coroutines-core", version.ref = "coroutines" }
kotlinx-coroutines-test = { group = "org.jetbrains.kotlinx", name = "kotlinx-coroutines-test", version.ref = "coroutines" }
junit-api = { group = "org.junit.jupiter", name = "junit-jupiter-api", version.ref = "junit" }
junit-engine = { group = "org.junit.jupiter", name = "junit-jupiter-engine", version.ref = "junit" }
junit-params = { group = "org.junit.jupiter", name = "junit-jupiter-params", version.ref = "junit" }
okhttp = { module = "com.squareup.okhttp3:okhttp", version.ref = "okhttp" }
okio = { module = "com.squareup.okio:okio", version.ref = "okio" }
json = { module = "org.json:json", version.ref = "json" }
androidx-collection = { module = "androidx.collection:collection", version.ref = "androidx-collection" }
jsoup = { module = "org.jsoup:jsoup", version.ref = "jsoup" }
quickjs = { module = "io.webfolder:quickjs", version.ref = "quickjs" }

@ -1,5 +1,7 @@
#Wed Aug 27 01:56:37 ICT 2025
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.2-bin.zip
distributionSha256Sum=bd71102213493060956ec229d946beee57158dbd89d0e62b91bca0fa2c5f3531
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

@ -1,11 +0,0 @@
plugins {
id 'org.jetbrains.kotlin.jvm'
}
kotlin {
jvmToolchain(8)
}
dependencies {
implementation 'com.google.devtools.ksp:symbol-processing-api:1.9.22-1.0.17'
}

@ -0,0 +1,11 @@
plugins {
alias(libs.plugins.kotlin.jvm)
}
kotlin {
jvmToolchain(8)
}
dependencies {
implementation(libs.ksp.symbol.processing.api)
}

@ -7,165 +7,180 @@ import com.google.devtools.ksp.symbol.KSAnnotated
import com.google.devtools.ksp.symbol.KSClassDeclaration
import com.google.devtools.ksp.symbol.KSVisitorVoid
import com.google.devtools.ksp.validate
import java.io.File
import java.io.Writer
import java.util.*
class ParserProcessor(
private val codeGenerator: CodeGenerator,
private val logger: KSPLogger,
private val options: Map<String, String>,
private val codeGenerator: CodeGenerator,
private val logger: KSPLogger,
private val options: Map<String, String>,
) : SymbolProcessor {
private val availableLocales = Locale.getAvailableLocales().toSet()
private val sourceNamePattern = Regex("[A-Z_][A-Z0-9_]{3,}")
override fun process(resolver: Resolver): List<KSAnnotated> {
val symbols = resolver.getSymbolsWithAnnotation("org.koitharu.kotatsu.parsers.MangaSourceParser")
val ret = symbols.filterNot { it.validate() }.toList()
if (!symbols.iterator().hasNext()) {
return ret
}
val dependencies = Dependencies.ALL_FILES
val factoryFile =
try {
codeGenerator.createNewFile(
dependencies = dependencies,
packageName = "org.koitharu.kotatsu.parsers",
fileName = "MangaParserFactory",
)
} catch (e: FileAlreadyExistsException) {
logger.warn(e.toString(), null)
null
}
val sourcesFile =
try {
codeGenerator.createNewFile(
dependencies = dependencies,
packageName = "org.koitharu.kotatsu.parsers.model",
fileName = "MangaSource",
)
} catch (e: FileAlreadyExistsException) {
logger.warn(e.toString(), null)
null
}
val totalCount = sourcesFile?.writer().use { sourcesWriter ->
factoryFile?.writer().use { factoryWriter ->
writeContent(sourcesWriter, factoryWriter, symbols)
}
}
writeSummary(totalCount)
return ret
}
private fun writeContent(
sourcesWriter: Writer?,
factoryWriter: Writer?,
symbols: Sequence<KSAnnotated>,
): Int {
if (sourcesWriter == null && factoryWriter == null) {
return 0
}
factoryWriter?.write(
"""
package org.koitharu.kotatsu.parsers
import org.koitharu.kotatsu.parsers.model.MangaParserSource
import org.koitharu.kotatsu.parsers.core.MangaParserWrapper
internal fun MangaParserSource.newParser(context: MangaLoaderContext): MangaParser = when (this) {
private val availableLocales = Locale.getAvailableLocales().toSet()
private val sourceNamePattern = Regex("[A-Z_][A-Z0-9_]{3,}")
override fun process(resolver: Resolver): List<KSAnnotated> {
val symbols = resolver
.getSymbolsWithAnnotation("org.koitharu.kotatsu.parsers.MangaSourceParser")
val ret = symbols.filterNot { it.validate() }.toList()
if (!symbols.iterator().hasNext()) {
return ret
}
val dependencies = Dependencies.ALL_FILES
val factoryFile = try {
codeGenerator.createNewFile(
dependencies = dependencies,
packageName = "org.koitharu.kotatsu.parsers",
fileName = "MangaParserFactory",
)
} catch (e: FileAlreadyExistsException) {
logger.warn(e.toString(), null)
null
}
val sourcesFile = try {
codeGenerator.createNewFile(
dependencies = dependencies,
packageName = "org.koitharu.kotatsu.parsers.model",
fileName = "MangaSource",
)
} catch (e: FileAlreadyExistsException) {
logger.warn(e.toString(), null)
null
}
sourcesFile?.writer().use { sourcesWriter ->
factoryFile?.writer().use { factoryWriter ->
writeContent(sourcesWriter, factoryWriter, symbols)
}
}
return ret
}
private fun writeContent(
sourcesWriter: Writer?,
factoryWriter: Writer?,
symbols: Sequence<KSAnnotated>,
) {
if (sourcesWriter == null && factoryWriter == null) {
return
}
factoryWriter?.write(
"""
package org.koitharu.kotatsu.parsers
import org.koitharu.kotatsu.parsers.model.MangaSource
@Suppress("DEPRECATION")
@InternalParsersApi
@Deprecated("", replaceWith = ReplaceWith("context.newParserInstance(this)"))
fun MangaSource.newParser(context: MangaLoaderContext): MangaParser = when (this) {
""".trimIndent(),
)
//language=kotlin
sourcesWriter?.write(
"""
package org.koitharu.kotatsu.parsers.model
enum class MangaSource(
val title: String,
val locale: String?,
val contentType: ContentType,
) {
LOCAL("Local", null, ContentType.OTHER),
)
sourcesWriter?.write(
"""
package org.koitharu.kotatsu.parsers.model
public enum class MangaParserSource(
public val title: String,
public val locale: String,
public val contentType: ContentType,
public val isBroken: Boolean,
): MangaSource {
""".trimIndent(),
)
val visitor = ParserVisitor(sourcesWriter, factoryWriter)
symbols
.filter { it is KSClassDeclaration && it.validate() }
.forEach { it.accept(visitor, Unit) }
factoryWriter?.write(
"""
MangaSource.LOCAL -> throw NotImplementedError("Local manga parser is not supported")
MangaSource.DUMMY -> throw NotImplementedError("Dummy manga parser cannot be instantiated")
}.also {
)
val visitor = ParserVisitor(sourcesWriter, factoryWriter)
val totalCount = symbols
.filter { it is KSClassDeclaration && it.validate() }
.onEach { it.accept(visitor, Unit) }
.count()
factoryWriter?.write(
"""
}.let {
require(it.source == this) {
"Cannot instantiate manga parser: ${'$'}name mapped to ${'$'}{it.source}"
}
MangaParserWrapper(it)
}
""".trimIndent(),
)
sourcesWriter?.write(
"""
DUMMY("Dummy", null, ContentType.OTHER),
)
sourcesWriter?.write(
"""
;
}
""".trimIndent(),
)
}
)
return totalCount
}
private inner class ParserVisitor(
private val sourcesWriter: Writer?,
private val factoryWriter: Writer?,
) : KSVisitorVoid() {
private fun writeSummary(totalCount: Int) {
val file = File(options["summaryOutputDir"] ?: return, "summary.yaml")
file.writeText("total: $totalCount")
}
private val titles = HashMap<String, String>()
private inner class ParserVisitor(
private val sourcesWriter: Writer?,
private val factoryWriter: Writer?,
) : KSVisitorVoid() {
private val titles = HashMap<String, String>()
override fun visitClassDeclaration(classDeclaration: KSClassDeclaration, data: Unit) {
if (classDeclaration.classKind != ClassKind.CLASS || classDeclaration.isAbstract()) {
logger.error("Only non-abstract can be annotated with @MangaSourceParser", classDeclaration)
}
val annotation = classDeclaration.annotations.single { it.shortName.asString() == "MangaSourceParser" }
val deprecation = classDeclaration.annotations.singleOrNull { it.shortName.asString() == "Deprecated" }
val name = annotation.arguments.single { it.name?.asString() == "name" }.value as String
val title = annotation.arguments.single { it.name?.asString() == "title" }.value as String
val locale = annotation.arguments.single { it.name?.asString() == "locale" }.value as String
val type = annotation.arguments.single { it.name?.asString() == "type" }.value
val localeString = if (locale.isEmpty()) "null" else "\"$locale\""
val localeObj = if (locale.isEmpty()) null else Locale(locale)
val localeTitle = localeObj?.getDisplayLanguage(localeObj)
if (localeObj != null && localeObj !in availableLocales) {
logger.error("Manga source $name has wrong locale: $localeTitle")
}
override fun visitClassDeclaration(
classDeclaration: KSClassDeclaration,
data: Unit,
) {
if (classDeclaration.classKind != ClassKind.CLASS || classDeclaration.isAbstract()) {
logger.error("Only non-abstract can be annotated with @MangaSourceParser", classDeclaration)
}
val annotation = classDeclaration.annotations.single { it.shortName.asString() == "MangaSourceParser" }
val deprecation = classDeclaration.annotations.singleOrNull { it.shortName.asString() == "Deprecated" }
val isBroken = classDeclaration.annotations.any { it.shortName.asString() == "Broken" }
val name = annotation.arguments.single { it.name?.asString() == "name" }.value as String
val title = annotation.arguments.single { it.name?.asString() == "title" }.value as String
val locale = annotation.arguments.single { it.name?.asString() == "locale" }.value as String
val type = annotation.arguments.single { it.name?.asString() == "type" }.value
val localeString = "\"$locale\""
val localeObj = if (locale.isEmpty()) null else Locale(locale)
val localeTitle = localeObj?.getDisplayLanguage(localeObj)
if (localeObj != null && localeObj !in availableLocales) {
logger.error("Manga source $name has wrong locale: $localeTitle")
}
if (!sourceNamePattern.matches(name)) {
logger.error("Manga source name must be uppercase: $name")
}
if (!sourceNamePattern.matches(name)) {
logger.error("Manga source name must be uppercase: $name")
}
val constructor = classDeclaration.primaryConstructor
if (constructor == null || constructor.parameters.count { !it.hasDefault } != 1) {
logger.error(
"Class with @MangaSourceParser must have a primary constructor with one parameter",
classDeclaration,
)
}
val className = checkNotNull(classDeclaration.qualifiedName?.asString()) { "Class name is null" }
val constructor = classDeclaration.primaryConstructor
if (constructor == null || constructor.parameters.count { !it.hasDefault } != 1) {
logger.error(
"Class with @MangaSourceParser must have a primary constructor with one parameter",
classDeclaration,
)
}
val className = checkNotNull(classDeclaration.qualifiedName?.asString()) { "Class name is null" }
val prevTitleClass = titles.put(title, className)
if (prevTitleClass != null) {
logger.warn("Source title duplication: \"$title\" is assigned to both $prevTitleClass and $className")
}
val prevTitleClass = titles.put(title, className)
if (prevTitleClass != null) {
logger.warn("Source title duplication: \"$title\" is assigned to both $prevTitleClass and $className")
}
factoryWriter?.write("\tMangaSource.$name -> $className(context)\n")
val deprecationString = if (deprecation != null) {
val reason = deprecation.arguments
.find { it.name?.asString() == "message" }?.value?.toString() ?: "Unknown reason"
"@Deprecated(\"$reason\") "
} else ""
val localeComment = localeTitle?.toTitleCase(localeObj)?.let { " /* $it */" }.orEmpty()
sourcesWriter?.write("\t$deprecationString$name(\"$title\", $localeString$localeComment, ContentType.$type),\n")
}
}
factoryWriter?.write("\tMangaParserSource.$name -> $className(context)\n")
val deprecationString =
if (deprecation != null) {
val reason =
deprecation.arguments
.find { it.name?.asString() == "message" }
?.value
?.toString() ?: "Unknown reason"
"@Deprecated(\"$reason\") "
} else {
""
}
val localeComment = localeTitle?.toTitleCase(localeObj)?.let { " /* $it */" }.orEmpty()
sourcesWriter?.write(
"\t$deprecationString$name(\"$title\", $localeString$localeComment, $type, $isBroken),\n",
)
}
}
}

@ -1,19 +0,0 @@
pluginManagement {
repositories {
google()
mavenCentral()
gradlePluginPortal()
}
}
dependencyResolutionManagement {
repositories {
google()
mavenCentral()
gradlePluginPortal()
}
}
rootProject.name = 'kotatsu-parsers'
include 'kotatsu-parsers-ksp'

@ -0,0 +1,18 @@
pluginManagement {
repositories {
google()
mavenCentral()
gradlePluginPortal()
}
}
dependencyResolutionManagement {
repositories {
google()
mavenCentral()
gradlePluginPortal()
}
}
rootProject.name = "kotatsu-parsers"
include("kotatsu-parsers-ksp")

@ -0,0 +1,14 @@
package org.koitharu.kotatsu.parsers
/**
* Annotate [MangaParser] implementation to mark this parser as broken instead of removing it
*/
@Target(AnnotationTarget.CLASS)
@Retention(AnnotationRetention.SOURCE)
internal annotation class Broken(
/**
* Reason why this parser is broken
*/
val message: String = "",
)

@ -1,14 +1,18 @@
package org.koitharu.kotatsu.parsers
object ErrorMessages {
public object ErrorMessages {
const val FILTER_MULTIPLE_STATES_NOT_SUPPORTED = "Multiple states are not supported by this source"
const val FILTER_MULTIPLE_GENRES_NOT_SUPPORTED = "Multiple genres are not supported by this source"
const val FILTER_MULTIPLE_CONTENT_RATING_NOT_SUPPORTED =
"Multiple Content Rating are not supported by this source"
const val FILTER_BOTH_LOCALE_GENRES_NOT_SUPPORTED =
"Filtering by both genres and locale is not supported by this source"
const val FILTER_BOTH_STATES_GENRES_NOT_SUPPORTED =
"Filtering by both genres and states is not supported by this source"
const val SEARCH_NOT_SUPPORTED = "Search is not supported by this source"
public const val FILTER_MULTIPLE_STATES_NOT_SUPPORTED: String = "Multiple states are not supported by this source"
public const val FILTER_MULTIPLE_GENRES_NOT_SUPPORTED: String = "Multiple genres are not supported by this source"
public const val FILTER_MULTIPLE_CONTENT_RATING_NOT_SUPPORTED: String =
"Multiple Content ratings are not supported by this source"
public const val FILTER_MULTIPLE_CONTENT_TYPES_NOT_SUPPORTED: String =
"Multiple Content types are not supported by this source"
public const val FILTER_MULTIPLE_DEMOGRAPHICS_NOT_SUPPORTED: String =
"Multiple Demographics are not supported by this source"
public const val FILTER_BOTH_LOCALE_GENRES_NOT_SUPPORTED: String =
"Filtering by both genres and locale is not supported by this source"
public const val FILTER_BOTH_STATES_GENRES_NOT_SUPPORTED: String =
"Filtering by both genres and states is not supported by this source"
public const val SEARCH_NOT_SUPPORTED: String = "Search is not supported by this source"
}

@ -11,4 +11,4 @@ package org.koitharu.kotatsu.parsers
@SinceKotlin("1.3")
@RequiresOptIn
@MustBeDocumented
annotation class InternalParsersApi
public annotation class InternalParsersApi

@ -1,32 +1,78 @@
package org.koitharu.kotatsu.parsers
import okhttp3.CookieJar
import okhttp3.HttpUrl
import okhttp3.HttpUrl.Companion.toHttpUrl
import okhttp3.OkHttpClient
import okhttp3.Response
import org.koitharu.kotatsu.parsers.bitmap.Bitmap
import org.koitharu.kotatsu.parsers.config.MangaSourceConfig
import org.koitharu.kotatsu.parsers.model.MangaParserSource
import org.koitharu.kotatsu.parsers.model.MangaSource
import org.koitharu.kotatsu.parsers.util.LinkResolver
import java.util.*
abstract class MangaLoaderContext {
public abstract class MangaLoaderContext {
abstract val httpClient: OkHttpClient
public abstract val httpClient: OkHttpClient
abstract val cookieJar: CookieJar
public abstract val cookieJar: CookieJar
@Suppress("DEPRECATION")
fun newParserInstance(source: MangaSource): MangaParser = source.newParser(this)
public fun newParserInstance(source: MangaParserSource): MangaParser = source.newParser(this)
open fun encodeBase64(data: ByteArray): String = Base64.getEncoder().encodeToString(data)
public fun newLinkResolver(link: HttpUrl): LinkResolver = LinkResolver(this, link)
open fun decodeBase64(data: String): ByteArray = Base64.getDecoder().decode(data)
public fun newLinkResolver(link: String): LinkResolver = newLinkResolver(link.toHttpUrl())
open fun getPreferredLocales(): List<Locale> = listOf(Locale.getDefault())
public open fun encodeBase64(data: ByteArray): String = Base64.getEncoder().encodeToString(data)
public open fun decodeBase64(data: String): ByteArray = Base64.getDecoder().decode(data)
public open fun getPreferredLocales(): List<Locale> = listOf(Locale.getDefault())
/**
* Execute JavaScript code and return result
* @param script JavaScript source code
* @return execution result as string, may be null
*/
abstract suspend fun evaluateJs(script: String): String?
@Deprecated("Provide a base url")
public abstract suspend fun evaluateJs(script: String): String?
/**
* Execute JavaScript code and return result
* @param script JavaScript source code
* @param baseUrl url of page script will be executed in context of
* @return execution result as string, may be null
*/
public abstract suspend fun evaluateJs(baseUrl: String, script: String): String?
/**
* Open [url] in browser for some external action (e.g. captcha solving or non cookie-based authorization)
*/
public open fun requestBrowserAction(parser: MangaParser, url: String): Nothing {
throw UnsupportedOperationException("Browser is not available")
}
public abstract fun getConfig(source: MangaSource): MangaSourceConfig
abstract fun getConfig(source: MangaSource): MangaSourceConfig
public abstract fun getDefaultUserAgent(): String
/**
* Helper function to be used in an interceptor
* to descramble images
* @param response Image response
* @param redraw lambda function to implement descrambling logic
*/
public abstract fun redrawImageResponse(
response: Response,
redraw: (image: Bitmap) -> Bitmap,
): Response
/**
* create a new empty Bitmap with given dimensions
*/
public abstract fun createBitmap(
width: Int,
height: Int,
): Bitmap
}

@ -1,253 +1,88 @@
package org.koitharu.kotatsu.parsers
import androidx.annotation.CallSuper
import androidx.annotation.VisibleForTesting
import okhttp3.Headers
import okhttp3.HttpUrl
import okhttp3.Interceptor
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.config.MangaSourceConfig
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.network.OkHttpWebClient
import org.koitharu.kotatsu.parsers.network.UserAgents
import org.koitharu.kotatsu.parsers.network.WebClient
import org.koitharu.kotatsu.parsers.util.FaviconParser
import org.koitharu.kotatsu.parsers.util.RelatedMangaFinder
import org.koitharu.kotatsu.parsers.util.domain
import org.koitharu.kotatsu.parsers.util.toAbsoluteUrl
import org.koitharu.kotatsu.parsers.model.search.MangaSearchQuery
import org.koitharu.kotatsu.parsers.model.search.MangaSearchQueryCapabilities
import org.koitharu.kotatsu.parsers.util.LinkResolver
import org.koitharu.kotatsu.parsers.util.convertToMangaSearchQuery
import org.koitharu.kotatsu.parsers.util.toMangaListFilterCapabilities
import java.util.*
abstract class MangaParser @InternalParsersApi constructor(
@property:InternalParsersApi val context: MangaLoaderContext,
val source: MangaSource,
) {
public interface MangaParser : Interceptor {
/**
* Supported [SortOrder] variants. Must not be empty.
*
* For better performance use [EnumSet] for more than one item.
*/
abstract val availableSortOrders: Set<SortOrder>
public val source: MangaParserSource
/**
* Supported [MangaState] variants for filtering. May be empty.
* Supported [SortOrder] variants. Must not be empty.
*
* For better performance use [EnumSet] for more than one item.
*/
open val availableStates: Set<MangaState>
get() = emptySet()
open val availableContentRating: Set<ContentRating>
get() = emptySet()
/**
* Whether parser supports filtering by more than one tag
*/
open val isMultipleTagsSupported: Boolean = true
/**
* Whether parser supports tagsExclude field in filter
*/
open val isTagsExclusionSupported: Boolean = false
/**
* Whether parser supports searching by string query using [MangaListFilter.Search]
*/
open val isSearchSupported: Boolean = true
public val availableSortOrders: Set<SortOrder>
@Deprecated(
message = "Use availableSortOrders instead",
replaceWith = ReplaceWith("availableSortOrders"),
)
open val sortOrders: Set<SortOrder>
get() = availableSortOrders
@Deprecated("Too complex. Use filterCapabilities instead")
public val searchQueryCapabilities: MangaSearchQueryCapabilities
val config by lazy { context.getConfig(source) }
public val filterCapabilities: MangaListFilterCapabilities
open val sourceLocale: Locale
get() = source.locale?.let { Locale(it) } ?: Locale.ROOT
public val config: MangaSourceConfig
val isNsfwSource = source.contentType == ContentType.HENTAI
public val authorizationProvider: MangaParserAuthProvider?
get() = this as? MangaParserAuthProvider
/**
* Provide default domain and available alternatives, if any.
*
* Never hardcode domain in requests, use [domain] instead.
*/
@InternalParsersApi
abstract val configKeyDomain: ConfigKey.Domain
open val headers: Headers = Headers.Builder()
.add("User-Agent", UserAgents.CHROME_MOBILE)
.build()
/**
* Used as fallback if value of `sortOrder` passed to [getList] is null
*/
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
open val defaultSortOrder: SortOrder
get() {
val supported = availableSortOrders
return SortOrder.entries.first { it in supported }
}
@JvmField
protected val webClient: WebClient = OkHttpWebClient(context.httpClient, source)
public val configKeyDomain: ConfigKey.Domain
/**
* Parse list of manga by specified criteria
*
* @param offset starting from 0 and used for pagination.
* Note than passed value may not be divisible by internal page size, so you should adjust it manually.
* @param query search query, may be null or empty if no search needed
* @param tags genres for filtering, values from [getAvailableTags] and [Manga.tags]. May be null or empty
* @param sortOrder one of [availableSortOrders] or null for default value
*/
@JvmSynthetic
@InternalParsersApi
@Deprecated(
"Use getList with filter instead",
replaceWith = ReplaceWith("getList(offset, filter)"),
)
open suspend fun getList(
offset: Int,
query: String?,
tags: Set<MangaTag>?,
tagsExclude: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> = throw NotImplementedError("Please implement getList(offset, filter) instead")
public val domain: String
/**
* Parse list of manga with search by text query
*
* @param offset starting from 0 and used for pagination.
* @param query search query
*/
@Deprecated(
"Use getList with filter instead",
ReplaceWith(
"getList(offset, MangaListFilter.Search(query))",
"org.koitharu.kotatsu.parsers.model.MangaListFilter",
),
)
open suspend fun getList(offset: Int, query: String): List<Manga> {
return getList(offset, MangaListFilter.Search(query))
}
@Deprecated("Too complex. Use getList with filter instead")
public suspend fun getList(query: MangaSearchQuery): List<Manga>
/**
* Parse list of manga by specified criteria
*
* @param offset starting from 0 and used for pagination.
* Note than passed value may not be divisible by internal page size, so you should adjust it manually.
* @param tags genres for filtering, values from [getAvailableTags] and [Manga.tags]. May be null or empty
* @param sortOrder one of [availableSortOrders] or null for default value
*/
@Deprecated(
"Use getList with filter instead",
ReplaceWith(
"getList(offset, MangaListFilter.Advanced(sortOrder, tags, null, emptySet()))",
"org.koitharu.kotatsu.parsers.model.MangaListFilter",
),
)
open suspend fun getList(
offset: Int,
tags: Set<MangaTag>?,
tagsExclude: Set<MangaTag>?,
sortOrder: SortOrder?,
): List<Manga> {
return getList(
offset,
MangaListFilter.Advanced(
sortOrder = sortOrder ?: defaultSortOrder,
tags = tags.orEmpty(),
tagsExclude = tagsExclude.orEmpty(),
locale = null,
states = emptySet(),
contentRating = emptySet(),
),
)
}
@Suppress("DEPRECATION")
open suspend fun getList(offset: Int, filter: MangaListFilter?): List<Manga> {
return when (filter) {
is MangaListFilter.Advanced -> getList(
offset = offset,
query = null,
tags = filter.tags,
tagsExclude = filter.tagsExclude,
sortOrder = filter.sortOrder,
)
is MangaListFilter.Search -> getList(
offset = offset,
query = filter.query,
tags = null,
tagsExclude = null,
sortOrder = defaultSortOrder,
)
null -> getList(
offset = offset,
query = null,
tags = null,
tagsExclude = null,
sortOrder = defaultSortOrder,
)
}
}
public suspend fun getList(offset: Int, order: SortOrder, filter: MangaListFilter): List<Manga>
/**
* Parse details for [Manga]: chapters list, description, large cover, etc.
* Must return the same manga, may change any fields excepts id, url and source
* @see Manga.copy
*/
abstract suspend fun getDetails(manga: Manga): Manga
public suspend fun getDetails(manga: Manga): Manga
/**
* Parse pages list for specified chapter.
* @see MangaPage for details
*/
abstract suspend fun getPages(chapter: MangaChapter): List<MangaPage>
public suspend fun getPages(chapter: MangaChapter): List<MangaPage>
/**
* Fetch direct link to the page image.
*/
open suspend fun getPageUrl(page: MangaPage): String = page.url.toAbsoluteUrl(domain)
public suspend fun getPageUrl(page: MangaPage): String
/**
* Fetch available tags (genres) for source
*/
abstract suspend fun getAvailableTags(): Set<MangaTag>
public suspend fun getFilterOptions(): MangaListFilterOptions
/**
* Fetch available locales for multilingual sources
* Parse favicons from the main page of the source`s website
*/
open suspend fun getAvailableLocales(): Set<Locale> = emptySet()
public suspend fun getFavicons(): Favicons
public fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>)
public suspend fun getRelatedManga(seed: Manga): List<Manga>
@Deprecated(
message = "Use getAvailableTags instead",
replaceWith = ReplaceWith("getAvailableTags()"),
)
suspend fun getTags(): Set<MangaTag> = getAvailableTags()
public fun getRequestHeaders(): Headers
/**
* Parse favicons from the main page of the source`s website
* Return [Manga] object by web link to it
* @see [Manga.publicUrl]
*/
open suspend fun getFavicons(): Favicons {
return FaviconParser(webClient, domain).parseFavicons()
}
@CallSuper
open fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
keys.add(configKeyDomain)
}
open suspend fun getRelatedManga(seed: Manga): List<Manga> {
return RelatedMangaFinder(listOf(this)).invoke(seed)
}
protected fun getParser(source: MangaSource) = if (this.source == source) {
this
} else {
context.newParserInstance(source)
}
@InternalParsersApi
public suspend fun resolveLink(resolver: LinkResolver, link: HttpUrl): Manga?
}

@ -6,19 +6,19 @@ import org.koitharu.kotatsu.parsers.exception.ParseException
/**
* Implement this in your parser for authorization support
*/
interface MangaParserAuthProvider {
public interface MangaParserAuthProvider {
/**
* Return link to the login page, which will be opened in browser.
* Must be an absolute url
*/
val authUrl: String
public val authUrl: String
/**
* Quick check if user is logged in.
* In most case you should check for cookies in [MangaLoaderContext.cookieJar].
*/
val isAuthorized: Boolean
public suspend fun isAuthorized(): Boolean
/**
* Fetch and return current user`s name or login.
@ -26,5 +26,5 @@ interface MangaParserAuthProvider {
* @throws [AuthRequiredException] if user is not logged in or authorization is expired
* @throws [ParseException] on parsing error
*/
suspend fun getUsername(): String
}
public suspend fun getUsername(): String
}

@ -6,7 +6,8 @@ import org.koitharu.kotatsu.parsers.model.ContentType
* Annotate each [MangaParser] implementation with this annotation, used by codegen
*/
@Target(AnnotationTarget.CLASS)
annotation class MangaSourceParser(
@Retention(AnnotationRetention.SOURCE)
internal annotation class MangaSourceParser(
/**
* Name of manga source. Used as an Enum value, must be UPPER_CASE and unique.
*/

@ -1,89 +0,0 @@
package org.koitharu.kotatsu.parsers
import androidx.annotation.RestrictTo
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.Paginator
@InternalParsersApi
abstract class PagedMangaParser(
context: MangaLoaderContext,
source: MangaSource,
@RestrictTo(RestrictTo.Scope.TESTS) @JvmField internal val pageSize: Int,
searchPageSize: Int = pageSize,
) : MangaParser(context, source) {
@JvmField
protected val paginator = Paginator(pageSize)
@JvmField
protected val searchPaginator = Paginator(searchPageSize)
final override suspend fun getList(offset: Int, filter: MangaListFilter?): List<Manga> {
return getList(
paginator = if (filter is MangaListFilter.Search) {
searchPaginator
} else {
paginator
},
offset = offset,
filter = filter,
)
}
@InternalParsersApi
@Deprecated("You should use getListPage for PagedMangaParser", level = DeprecationLevel.HIDDEN)
final override suspend fun getList(
offset: Int,
query: String?,
tags: Set<MangaTag>?,
tagsExclude: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> = throw UnsupportedOperationException("You should use getListPage for PagedMangaParser")
open suspend fun getListPage(
page: Int,
query: String?,
tags: Set<MangaTag>?,
tagsExclude: Set<MangaTag>?,
sortOrder: SortOrder,
): List<Manga> = throw NotImplementedError("Please implement getListPage(page, filter) instead")
open suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
return when (filter) {
is MangaListFilter.Advanced -> getListPage(
page = page,
query = null,
tags = filter.tags,
tagsExclude = filter.tagsExclude,
sortOrder = filter.sortOrder,
)
is MangaListFilter.Search -> getListPage(
page = page,
query = filter.query,
tags = null,
tagsExclude = null,
sortOrder = defaultSortOrder,
)
null -> getListPage(
page = page,
query = null,
tags = null,
tagsExclude = null,
sortOrder = defaultSortOrder,
)
}
}
private suspend fun getList(
paginator: Paginator,
offset: Int,
filter: MangaListFilter?,
): List<Manga> {
val page = paginator.getPage(offset)
val list = getListPage(page, filter)
paginator.onListReceived(offset, page, list.size)
return list
}
}

@ -0,0 +1,9 @@
package org.koitharu.kotatsu.parsers.bitmap
public interface Bitmap {
public val width: Int
public val height: Int
public fun drawBitmap(sourceBitmap: Bitmap, src: Rect, dst: Rect)
}

@ -0,0 +1,15 @@
package org.koitharu.kotatsu.parsers.bitmap
public data class Rect(
val left: Int = 0,
val top: Int = 0,
val right: Int = 0,
val bottom: Int = 0,
) {
val width: Int
get() = right - left
val height: Int
get() = bottom - top
}

@ -1,13 +1,13 @@
package org.koitharu.kotatsu.parsers.config
sealed class ConfigKey<T>(
@JvmField val key: String,
public sealed class ConfigKey<T>(
@JvmField public val key: String,
) {
abstract val defaultValue: T
public abstract val defaultValue: T
class Domain(
@JvmField @JvmSuppressWildcards vararg val presetValues: String,
public class Domain(
@JvmField @JvmSuppressWildcards public vararg val presetValues: String,
) : ConfigKey<String>("domain") {
init {
@ -18,15 +18,20 @@ sealed class ConfigKey<T>(
get() = presetValues.first()
}
class ShowSuspiciousContent(
public class ShowSuspiciousContent(
override val defaultValue: Boolean,
) : ConfigKey<Boolean>("show_suspicious")
class UserAgent(
public class UserAgent(
override val defaultValue: String,
) : ConfigKey<String>("user_agent")
class SplitByTranslations(
public class SplitByTranslations(
override val defaultValue: Boolean,
) : ConfigKey<Boolean>("split_translations")
public class PreferredImageServer(
public val presetValues: Map<String?, String?>,
override val defaultValue: String?,
) : ConfigKey<String?>("img_server")
}

@ -1,6 +1,6 @@
package org.koitharu.kotatsu.parsers.config
interface MangaSourceConfig {
public interface MangaSourceConfig {
operator fun <T> get(key: ConfigKey<T>): T
}
public operator fun <T> get(key: ConfigKey<T>): T
}

@ -0,0 +1,105 @@
package org.koitharu.kotatsu.parsers.core
import androidx.annotation.CallSuper
import okhttp3.Headers
import okhttp3.HttpUrl
import okhttp3.Interceptor
import okhttp3.Response
import org.koitharu.kotatsu.parsers.InternalParsersApi
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.config.MangaSourceConfig
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.model.search.MangaSearchQuery
import org.koitharu.kotatsu.parsers.model.search.MangaSearchQueryCapabilities
import org.koitharu.kotatsu.parsers.network.OkHttpWebClient
import org.koitharu.kotatsu.parsers.network.WebClient
import org.koitharu.kotatsu.parsers.util.*
import java.util.*
/**
 * Base implementation of [MangaParser] holding the shared plumbing: config access,
 * network client, default headers, sort-order fallback and search-query bridging.
 * Concrete parsers extend this (usually via `PagedMangaParser`) and implement the
 * site-specific parsing.
 */
@Suppress("OVERRIDE_DEPRECATION")
@InternalParsersApi
public abstract class AbstractMangaParser @InternalParsersApi constructor(
	@property:InternalParsersApi public val context: MangaLoaderContext,
	public final override val source: MangaParserSource,
) : MangaParser {

	// Search capabilities are derived from the (simpler) filter capabilities.
	public final override val searchQueryCapabilities: MangaSearchQueryCapabilities
		get() = filterCapabilities.toMangaSearchQueryCapabilities()

	public override val config: MangaSourceConfig by lazy { context.getConfig(source) }

	// Locale of the source website; ROOT when the source does not declare one.
	public open val sourceLocale: Locale
		get() = source.locale.takeUnless { it.isEmpty() }?.let(::Locale) ?: Locale.ROOT

	// ADULT for hentai-typed sources, null otherwise.
	protected val sourceContentRating: ContentRating?
		get() = ContentRating.ADULT.takeIf { source.contentType == ContentType.HENTAI }

	protected val isNsfwSource: Boolean = source.contentType == ContentType.HENTAI

	protected open val userAgentKey: ConfigKey.UserAgent = ConfigKey.UserAgent(context.getDefaultUserAgent())

	// Default headers: only the configured User-Agent.
	override fun getRequestHeaders(): Headers = Headers.Builder().apply {
		add("User-Agent", config[userAgentKey])
	}.build()

	/**
	 * Used as fallback if value of `order` passed to [getList] is null
	 */
	public open val defaultSortOrder: SortOrder
		get() {
			val supported = availableSortOrders
			return SortOrder.entries.first { candidate -> candidate in supported }
		}

	final override val domain: String
		get() = config[configKeyDomain]

	@JvmField
	protected val webClient: WebClient = OkHttpWebClient(context.httpClient, source)

	/**
	 * Search list of manga by the specified [query], bridged onto the
	 * filter-based [getList] overload.
	 */
	public final override suspend fun getList(query: MangaSearchQuery): List<Manga> {
		val effectiveOrder = query.order ?: defaultSortOrder
		return getList(
			offset = query.offset,
			order = effectiveOrder,
			filter = convertToMangaListFilter(query),
		)
	}

	/**
	 * Fetch direct link to the page image; by default the page url resolved
	 * against the source domain.
	 */
	public override suspend fun getPageUrl(page: MangaPage): String = page.url.toAbsoluteUrl(domain)

	/**
	 * Parse favicons from the main page of the source's website.
	 */
	public override suspend fun getFavicons(): Favicons = FaviconParser(webClient, domain).parseFavicons()

	@CallSuper
	public override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
		keys.add(configKeyDomain)
	}

	public override suspend fun getRelatedManga(seed: Manga): List<Manga> =
		RelatedMangaFinder(listOf(this)).invoke(seed)

	/**
	 * Return [Manga] object by web link to it; null when link resolution is unsupported.
	 * @see [Manga.publicUrl]
	 */
	override suspend fun resolveLink(resolver: LinkResolver, link: HttpUrl): Manga? = null

	// No-op network interception by default.
	override fun intercept(chain: Interceptor.Chain): Response = chain.proceed(chain.request())
}

@ -0,0 +1,94 @@
package org.koitharu.kotatsu.parsers.core
import androidx.annotation.CallSuper
import okhttp3.Headers
import okhttp3.HttpUrl
import okhttp3.Interceptor
import okhttp3.Response
import org.koitharu.kotatsu.parsers.InternalParsersApi
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.config.MangaSourceConfig
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.network.OkHttpWebClient
import org.koitharu.kotatsu.parsers.network.WebClient
import org.koitharu.kotatsu.parsers.util.*
import java.util.*
/**
 * Legacy base parser driven by [MangaSearchQuery] instead of filters.
 * Mirrors AbstractMangaParser but bridges in the opposite direction:
 * the filter-based [getList] is converted into a search query.
 */
@Deprecated("Too complex. Use AbstractMangaParser instead")
internal abstract class FlexibleMangaParser @InternalParsersApi constructor(
	@property:InternalParsersApi val context: MangaLoaderContext,
	final override val source: MangaParserSource,
) : MangaParser {

	override val config: MangaSourceConfig by lazy { context.getConfig(source) }

	// Locale of the source website; ROOT when the source does not declare one.
	open val sourceLocale: Locale
		get() = if (source.locale.isEmpty()) Locale.ROOT else Locale(source.locale)

	protected open val userAgentKey: ConfigKey.UserAgent = ConfigKey.UserAgent(context.getDefaultUserAgent())

	// Filter capabilities are derived from the search capabilities (inverse of AbstractMangaParser).
	final override val filterCapabilities: MangaListFilterCapabilities
		get() = searchQueryCapabilities.toMangaListFilterCapabilities()

	// ADULT for hentai-typed sources, null otherwise.
	protected val sourceContentRating: ContentRating?
		get() = if (source.contentType == ContentType.HENTAI) {
			ContentRating.ADULT
		} else {
			null
		}

	final override val domain: String
		get() = config[configKeyDomain]

	// Default headers: only the configured User-Agent.
	@Deprecated("Override intercept() instead")
	override fun getRequestHeaders(): Headers = Headers.Builder()
		.add("User-Agent", config[userAgentKey])
		.build()

	/**
	 * Used as fallback if value of `order` passed to [getList] is null
	 */
	open val defaultSortOrder: SortOrder
		get() {
			val supported = availableSortOrders
			return SortOrder.entries.first { it in supported }
		}

	@JvmField
	protected val webClient: WebClient = OkHttpWebClient(context.httpClient, source)

	/**
	 * Fetch direct link to the page image.
	 */
	override suspend fun getPageUrl(page: MangaPage): String = page.url.toAbsoluteUrl(domain)

	// Filter-based listing is bridged onto the query-based API.
	final override suspend fun getList(offset: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
		return getList(convertToMangaSearchQuery(offset, order, filter))
	}

	/**
	 * Parse favicons from the main page of the source's website
	 */
	override suspend fun getFavicons(): Favicons {
		return FaviconParser(webClient, domain).parseFavicons()
	}

	@CallSuper
	override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
		keys.add(configKeyDomain)
	}

	override suspend fun getRelatedManga(seed: Manga): List<Manga> {
		return RelatedMangaFinder(listOf(this)).invoke(seed)
	}

	/**
	 * Return [Manga] object by web link to it; null when link resolution is unsupported.
	 * @see [Manga.publicUrl]
	 */
	override suspend fun resolveLink(resolver: LinkResolver, link: HttpUrl): Manga? = null

	// No-op network interception by default.
	override fun intercept(chain: Interceptor.Chain): Response = chain.proceed(chain.request())
}

@ -0,0 +1,60 @@
package org.koitharu.kotatsu.parsers.core
import androidx.annotation.VisibleForTesting
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.model.Manga
import org.koitharu.kotatsu.parsers.model.MangaParserSource
import org.koitharu.kotatsu.parsers.model.search.MangaSearchQuery
import org.koitharu.kotatsu.parsers.model.search.SearchableField
import org.koitharu.kotatsu.parsers.util.Paginator
/**
 * Legacy base class for paginated, [MangaSearchQuery]-driven parsers.
 *
 * Keeps two independent [Paginator]s — one for plain browsing and one for text
 * searches — because the two request kinds may use different page sizes and
 * must not share page/offset state.
 */
@Deprecated("Too complex. Use PagedMangaParser instead")
internal abstract class FlexiblePagedMangaParser(
	context: MangaLoaderContext,
	source: MangaParserSource,
	@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) @JvmField public val pageSize: Int,
	searchPageSize: Int = pageSize,
) : FlexibleMangaParser(context, source) {

	@JvmField
	protected val paginator: Paginator = Paginator(pageSize)

	@JvmField
	protected val searchPaginator: Paginator = Paginator(searchPageSize)

	final override suspend fun getList(query: MangaSearchQuery): List<Manga> {
		// FIX: a query carrying a TITLE_NAME criterion is a text search and must be
		// paged by [searchPaginator] (sized with searchPageSize). The previous code
		// had the selection inverted, mixing page state between browsing and
		// searching — compare PagedMangaParser, which routes search queries to
		// searchPaginator and plain listings to paginator.
		val isTitleSearch = query.criteria.any { it.field == SearchableField.TITLE_NAME }
		return searchManga(
			paginator = if (isTitleSearch) searchPaginator else paginator,
			query = query,
		)
	}

	/**
	 * Loads a single page of results matching [query].
	 * @param page page index produced by the active paginator
	 */
	public abstract suspend fun getListPage(query: MangaSearchQuery, page: Int): List<Manga>

	/**
	 * Overrides the index of the first page used by both paginators.
	 */
	protected fun setFirstPage(firstPage: Int, firstPageForSearch: Int = firstPage) {
		paginator.firstPage = firstPage
		searchPaginator.firstPage = firstPageForSearch
	}

	// Translates the query offset into a page index, fetches the page and feeds
	// the actual item count back to the paginator for subsequent requests.
	private suspend fun searchManga(
		paginator: Paginator,
		query: MangaSearchQuery,
	): List<Manga> {
		val offset: Int = query.offset
		val page = paginator.getPage(offset)
		val list = getListPage(query, page)
		paginator.onListReceived(offset, page, list.size)
		return list
	}
}

@ -0,0 +1,77 @@
package org.koitharu.kotatsu.parsers.core
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import okhttp3.Interceptor
import okhttp3.Request
import okhttp3.Response
import org.koitharu.kotatsu.parsers.MangaParser
import org.koitharu.kotatsu.parsers.MangaParserAuthProvider
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.model.search.MangaSearchQuery
import org.koitharu.kotatsu.parsers.util.mergeWith
/**
 * Decorator around a [MangaParser] that:
 *  - offloads all suspending work to [Dispatchers.Default];
 *  - validates search queries unless [MangaSearchQuery.skipValidation] is set;
 *  - merges the parser's default headers into every outgoing request (without
 *    overwriting explicitly set ones) before delegating to the parser's own
 *    [intercept].
 */
internal class MangaParserWrapper(
	private val delegate: MangaParser,
) : MangaParser by delegate {

	// Expose the delegate as an auth provider only if it implements the interface itself.
	override val authorizationProvider: MangaParserAuthProvider?
		get() = delegate as? MangaParserAuthProvider

	@Deprecated("Too complex. Use getList with filter instead")
	override suspend fun getList(query: MangaSearchQuery): List<Manga> = withContext(Dispatchers.Default) {
		// Validation can be bypassed explicitly by the query itself.
		if (!query.skipValidation) {
			searchQueryCapabilities.validate(query)
		}
		delegate.getList(query)
	}

	override suspend fun getList(
		offset: Int,
		order: SortOrder,
		filter: MangaListFilter,
	): List<Manga> = withContext(Dispatchers.Default) {
		delegate.getList(offset, order, filter)
	}

	override suspend fun getDetails(manga: Manga): Manga = withContext(Dispatchers.Default) {
		delegate.getDetails(manga)
	}

	override suspend fun getPages(chapter: MangaChapter): List<MangaPage> = withContext(Dispatchers.Default) {
		delegate.getPages(chapter)
	}

	override suspend fun getPageUrl(page: MangaPage): String = withContext(Dispatchers.Default) {
		delegate.getPageUrl(page)
	}

	override suspend fun getFilterOptions(): MangaListFilterOptions = withContext(Dispatchers.Default) {
		delegate.getFilterOptions()
	}

	override suspend fun getFavicons(): Favicons = withContext(Dispatchers.Default) {
		delegate.getFavicons()
	}

	override suspend fun getRelatedManga(seed: Manga): List<Manga> = withContext(Dispatchers.Default) {
		delegate.getRelatedManga(seed)
	}

	override fun intercept(chain: Interceptor.Chain): Response {
		val request = chain.request()
		// replaceExisting = false: headers already present on the request win
		// over the parser's defaults.
		val headers = request.headers.newBuilder()
			.mergeWith(delegate.getRequestHeaders(), replaceExisting = false)
			.build()
		val newRequest = request.newBuilder().headers(headers).build()
		return delegate.intercept(ProxyChain(chain, newRequest))
	}

	// Chain that forwards everything to the original but substitutes the
	// header-augmented request.
	private class ProxyChain(
		private val delegate: Interceptor.Chain,
		private val request: Request,
	) : Interceptor.Chain by delegate {

		override fun request(): Request = request
	}
}

@ -0,0 +1,57 @@
package org.koitharu.kotatsu.parsers.core
import androidx.annotation.VisibleForTesting
import org.koitharu.kotatsu.parsers.InternalParsersApi
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.model.Manga
import org.koitharu.kotatsu.parsers.model.MangaListFilter
import org.koitharu.kotatsu.parsers.model.MangaParserSource
import org.koitharu.kotatsu.parsers.model.SortOrder
import org.koitharu.kotatsu.parsers.util.Paginator
/**
 * Base class for parsers whose sources expose page-numbered listings.
 *
 * Two independent [Paginator]s are kept — one for plain listings and one for
 * text searches — so the two request kinds never share page/offset state.
 */
@InternalParsersApi
public abstract class PagedMangaParser(
	context: MangaLoaderContext,
	source: MangaParserSource,
	@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) @JvmField public val pageSize: Int,
	searchPageSize: Int = pageSize,
) : AbstractMangaParser(context, source) {

	@JvmField
	protected val paginator: Paginator = Paginator(pageSize)

	@JvmField
	protected val searchPaginator: Paginator = Paginator(searchPageSize)

	final override suspend fun getList(offset: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
		// Text searches are paged separately from plain listings.
		val activePaginator = if (filter.query.isNullOrEmpty()) paginator else searchPaginator
		return fetchPage(activePaginator, offset, order, filter)
	}

	/**
	 * Loads a single page of results.
	 * @param page page index produced by the active paginator
	 */
	public abstract suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga>

	/**
	 * Overrides the index of the first page used by both paginators.
	 */
	protected fun setFirstPage(firstPage: Int, firstPageForSearch: Int = firstPage) {
		paginator.firstPage = firstPage
		searchPaginator.firstPage = firstPageForSearch
	}

	// Translates the offset into a page index, fetches the page and feeds the
	// actual item count back to the paginator for subsequent requests.
	private suspend fun fetchPage(
		pager: Paginator,
		offset: Int,
		order: SortOrder,
		filter: MangaListFilter,
	): List<Manga> {
		val pageNumber = pager.getPage(offset)
		val result = getListPage(pageNumber, order, filter)
		pager.onListReceived(offset, pageNumber, result.size)
		return result
	}
}

@ -0,0 +1,24 @@
package org.koitharu.kotatsu.parsers.core
import org.koitharu.kotatsu.parsers.InternalParsersApi
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.model.Manga
import org.koitharu.kotatsu.parsers.model.MangaListFilter
import org.koitharu.kotatsu.parsers.model.MangaParserSource
import org.koitharu.kotatsu.parsers.model.SortOrder
/**
 * Base class for parsers whose sources return all results on a single page:
 * any request beyond the first page yields an empty list.
 */
@InternalParsersApi
public abstract class SinglePageMangaParser(
	context: MangaLoaderContext,
	source: MangaParserSource,
) : AbstractMangaParser(context, source) {

	final override suspend fun getList(offset: Int, order: SortOrder, filter: MangaListFilter): List<Manga> =
		// Only the very first request (offset <= 0) can return data.
		if (offset > 0) emptyList() else getList(order, filter)

	/**
	 * Loads the full (single-page) list of manga for [order] and [filter].
	 */
	public abstract suspend fun getList(order: SortOrder, filter: MangaListFilter): List<Manga>
}

@ -1,12 +1,13 @@
package org.koitharu.kotatsu.parsers.exception
import okio.IOException
import org.koitharu.kotatsu.parsers.InternalParsersApi
import org.koitharu.kotatsu.parsers.model.MangaSource
/**
* Authorization is required for access to the requested content
*/
class AuthRequiredException @InternalParsersApi @JvmOverloads constructor(
val source: MangaSource,
public class AuthRequiredException @InternalParsersApi @JvmOverloads constructor(
public val source: MangaSource,
cause: Throwable? = null,
) : RuntimeException("Authorization required", cause)
) : IOException("Authorization required", cause)

@ -1,3 +1,3 @@
package org.koitharu.kotatsu.parsers.exception
class ContentUnavailableException(message: String) : RuntimeException(message)
public class ContentUnavailableException(message: String) : RuntimeException(message)

@ -2,14 +2,15 @@ package org.koitharu.kotatsu.parsers.exception
import okio.IOException
import org.json.JSONArray
import org.koitharu.kotatsu.parsers.util.json.mapJSON
import org.koitharu.kotatsu.parsers.InternalParsersApi
import org.koitharu.kotatsu.parsers.util.json.mapJSONNotNull
class GraphQLException(private val errors: JSONArray) : IOException() {
public class GraphQLException @InternalParsersApi constructor(errors: JSONArray) : IOException() {
val messages = errors.mapJSON {
public val messages: List<String> = errors.mapJSONNotNull {
it.getString("message")
}
override val message: String
get() = messages.joinToString("\n")
}
}

@ -3,7 +3,7 @@ package org.koitharu.kotatsu.parsers.exception
import org.jsoup.HttpStatusException
import java.net.HttpURLConnection
class NotFoundException(
public class NotFoundException(
message: String,
url: String,
) : HttpStatusException(message, HttpURLConnection.HTTP_NOT_FOUND, url)
) : HttpStatusException(message, HttpURLConnection.HTTP_NOT_FOUND, url)

@ -2,8 +2,8 @@ package org.koitharu.kotatsu.parsers.exception
import org.koitharu.kotatsu.parsers.InternalParsersApi
class ParseException @InternalParsersApi @JvmOverloads constructor(
val shortMessage: String?,
val url: String,
public class ParseException @InternalParsersApi @JvmOverloads constructor(
public val shortMessage: String?,
public val url: String,
cause: Throwable? = null,
) : RuntimeException("$shortMessage at $url", cause)
) : RuntimeException("$shortMessage at $url", cause)

@ -0,0 +1,31 @@
package org.koitharu.kotatsu.parsers.exception
import okio.IOException
import java.time.Instant
import java.time.temporal.ChronoUnit
/**
 * Thrown when the remote source rate-limits requests (typically HTTP 429 —
 * confirm at call sites).
 *
 * NOTE(review): the class name is misspelled (should be `TooManyRequestsException`);
 * it is kept as-is for source/binary compatibility.
 *
 * @property url the url of the rate-limited request
 * @param retryAfter delay in milliseconds after which the request may be retried;
 * a non-positive value or [Long.MAX_VALUE] means "unknown"
 */
public class TooManyRequestExceptions(
	public val url: String,
	retryAfter: Long,
) : IOException("Too many requests") { // FIX: message typo ("Too man requests")

	// Absolute point in time when a retry is allowed, or null when no usable
	// retry-after value was provided.
	public val retryAt: Instant? = if (retryAfter > 0 && retryAfter < Long.MAX_VALUE) {
		Instant.now().plusMillis(retryAfter)
	} else {
		null
	}

	/**
	 * Returns the remaining delay in milliseconds before a retry is allowed,
	 * coerced to be non-negative, or -1 when unknown.
	 */
	public fun getRetryDelay(): Long {
		if (retryAt == null) {
			return -1L
		}
		return Instant.now().until(retryAt, ChronoUnit.MILLIS).coerceAtLeast(0L)
	}

	override val message: String?
		get() = if (retryAt != null) {
			"${super.message}, retry at $retryAt"
		} else {
			super.message
		}
}

@ -2,4 +2,10 @@
package org.koitharu.kotatsu.parsers.model
const val RATING_UNKNOWN = -1f
public const val RATING_UNKNOWN: Float = -1f
public const val YEAR_UNKNOWN: Int = 0
public const val YEAR_MIN: Int = 1900
public const val YEAR_MAX: Int = 2099

@ -1,6 +1,6 @@
package org.koitharu.kotatsu.parsers.model
enum class ContentRating {
public enum class ContentRating {
SAFE,
SUGGESTIVE,
ADULT

@ -1,12 +1,16 @@
package org.koitharu.kotatsu.parsers.model
enum class ContentType {
public enum class ContentType {
/**
* Standard manga, manhua, webtoons, etc
*/
MANGA,
MANHWA,
MANHUA,
/**
* Use this if the source provides mostly nsfw content.
*/
@ -17,8 +21,16 @@ enum class ContentType {
*/
COMICS,
NOVEL,
/**
* Use this type if no other suits your needs. For example, for an indie manga
*/
ONE_SHOT,
DOUJINSHI,
IMAGE_SET,
ARTIST_CG,
GAME_CG,
OTHER,
}

@ -0,0 +1,10 @@
package org.koitharu.kotatsu.parsers.model
/**
 * Target demographic (readership category) of a manga, used by sources
 * as a list-filter option.
 */
public enum class Demographic {
	SHOUNEN,
	SHOUJO,
	SEINEN,
	JOSEI,
	KODOMO,

	// presumably an explicit "no demographic" choice — confirm usage at call sites
	NONE,
}

@ -2,14 +2,14 @@ package org.koitharu.kotatsu.parsers.model
import okhttp3.HttpUrl.Companion.toHttpUrl
class Favicon internal constructor(
@JvmField val url: String,
@JvmField val size: Int,
public data class Favicon(
@JvmField public val url: String,
@JvmField public val size: Int,
@JvmField internal val rel: String?,
) : Comparable<Favicon> {
@JvmField
val type: String = url.toHttpUrl().pathSegments.last()
public val type: String = url.toHttpUrl().pathSegments.last()
.substringAfterLast('.', "").lowercase()
override fun compareTo(other: Favicon): Int {
@ -20,30 +20,6 @@ class Favicon internal constructor(
return relWeightOf(rel).compareTo(relWeightOf(other.rel))
}
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (javaClass != other?.javaClass) return false
other as Favicon
if (url != other.url) return false
if (size != other.size) return false
if (rel != other.rel) return false
return true
}
override fun hashCode(): Int {
var result = url.hashCode()
result = 31 * result + size
result = 31 * result + rel.hashCode()
return result
}
override fun toString(): String {
return "Favicon(size=$size, type='$type', rel='$rel', url='$url')"
}
private fun relWeightOf(rel: String?) = when (rel) {
"apple-touch-icon" -> 1 // Prefer apple-touch-icon because it has a better quality
"mask-icon" -> -1

@ -1,8 +1,8 @@
package org.koitharu.kotatsu.parsers.model
class Favicons internal constructor(
public class Favicons(
favicons: Collection<Favicon>,
@JvmField val referer: String,
@JvmField public val referer: String?,
) : Collection<Favicon> {
private val icons = favicons.sortedDescending()
@ -18,7 +18,7 @@ class Favicons internal constructor(
override fun iterator(): Iterator<Favicon> = icons.iterator()
operator fun minus(victim: Favicon): Favicons = Favicons(
public operator fun minus(victim: Favicon): Favicons = Favicons(
favicons = icons.filterNot { it == victim },
referer = referer,
)
@ -30,7 +30,7 @@ class Favicons internal constructor(
* @param types supported file types, e.g. png, svg, ico. May be null but not empty
*/
@JvmOverloads
fun find(size: Int, types: Set<String>? = null): Favicon? {
public fun find(size: Int, types: Set<String>? = null): Favicon? {
if (icons.isEmpty()) {
return null
}
@ -47,4 +47,13 @@ class Favicons internal constructor(
}
return result
}
public companion object {
@JvmStatic
public val EMPTY: Favicons = Favicons(emptySet(), null)
@JvmStatic
public fun single(url: String): Favicons = Favicons(setOf(Favicon(url, 0, null)), null)
}
}

@ -1,162 +1,203 @@
package org.koitharu.kotatsu.parsers.model
import org.koitharu.kotatsu.parsers.InternalParsersApi
import androidx.collection.ArrayMap
import org.koitharu.kotatsu.parsers.util.findById
import org.koitharu.kotatsu.parsers.util.nullIfEmpty
class Manga(
public data class Manga(
/**
* Unique identifier for manga
*/
@JvmField val id: Long,
@JvmField public val id: Long,
/**
* Manga title, human-readable
*/
@JvmField val title: String,
@JvmField public val title: String,
/**
* Alternative title (for example on other language), may be null
* Alternative titles (for example on other language), may be empty
*/
@JvmField val altTitle: String?,
@JvmField public val altTitles: Set<String>,
/**
* Relative url to manga (**without** a domain) or any other uri.
* Used principally in parsers
*/
@JvmField val url: String,
@JvmField public val url: String,
/**
* Absolute url to manga, must be ready to open in browser
*/
@JvmField val publicUrl: String,
@JvmField public val publicUrl: String,
/**
* Normalized manga rating, must be in range of 0..1 or [RATING_UNKNOWN] if rating s unknown
* @see hasRating
*/
@JvmField val rating: Float,
@JvmField public val rating: Float,
/**
* Indicates that manga may contain sensitive information (18+, NSFW)
*/
@JvmField val isNsfw: Boolean,
@JvmField public val contentRating: ContentRating?,
/**
* Absolute link to the cover
* @see largeCoverUrl
*/
@JvmField val coverUrl: String,
@JvmField public val coverUrl: String?,
/**
* Tags (genres) of the manga
*/
@JvmField val tags: Set<MangaTag>,
@JvmField public val tags: Set<MangaTag>,
/**
* Manga status (ongoing, finished) or null if unknown
*/
@JvmField val state: MangaState?,
@JvmField public val state: MangaState?,
/**
* Author of the manga, may be null
* Authors of the manga
*/
@JvmField val author: String?,
@JvmField public val authors: Set<String>,
/**
* Large cover url (absolute), null if is no large cover
* @see coverUrl
*/
@JvmField val largeCoverUrl: String? = null,
@JvmField public val largeCoverUrl: String? = null,
/**
* Manga description, may be html or null
*/
@JvmField val description: String? = null,
@JvmField public val description: String? = null,
/**
* List of chapters
*/
@JvmField val chapters: List<MangaChapter>? = null,
@JvmField public val chapters: List<MangaChapter>? = null,
/**
* Manga source
*/
@JvmField val source: MangaSource,
@JvmField public val source: MangaSource,
) {
/**
* Return if manga has a specified rating
* @see rating
*/
val hasRating: Boolean
get() = rating > 0f && rating <= 1f
fun getChapters(branch: String?): List<MangaChapter>? {
return chapters?.filter { x -> x.branch == branch }
}
@InternalParsersApi
fun copy(
title: String = this.title,
altTitle: String? = this.altTitle,
publicUrl: String = this.publicUrl,
rating: Float = this.rating,
isNsfw: Boolean = this.isNsfw,
coverUrl: String = this.coverUrl,
tags: Set<MangaTag> = this.tags,
state: MangaState? = this.state,
author: String? = this.author,
largeCoverUrl: String? = this.largeCoverUrl,
description: String? = this.description,
chapters: List<MangaChapter>? = this.chapters,
) = Manga(
@Deprecated("Use other constructor")
public constructor(
/**
* Unique identifier for manga
*/
id: Long,
/**
* Manga title, human-readable
*/
title: String,
/**
* Alternative title (for example on other language), may be null
*/
altTitle: String?,
/**
* Relative url to manga (**without** a domain) or any other uri.
* Used principally in parsers
*/
url: String,
/**
* Absolute url to manga, must be ready to open in browser
*/
publicUrl: String,
/**
* Normalized manga rating, must be in range of 0..1 or [RATING_UNKNOWN] if rating s unknown
* @see hasRating
*/
rating: Float,
/**
* Indicates that manga may contain sensitive information (18+, NSFW)
*/
isNsfw: Boolean,
/**
* Absolute link to the cover
* @see largeCoverUrl
*/
coverUrl: String?,
/**
* Tags (genres) of the manga
*/
tags: Set<MangaTag>,
/**
* Manga status (ongoing, finished) or null if unknown
*/
state: MangaState?,
/**
* Authors of the manga
*/
author: String?,
/**
* Large cover url (absolute), null if is no large cover
* @see coverUrl
*/
largeCoverUrl: String? = null,
/**
* Manga description, may be html or null
*/
description: String? = null,
/**
* List of chapters
*/
chapters: List<MangaChapter>? = null,
/**
* Manga source
*/
source: MangaSource,
) : this(
id = id,
title = title,
altTitle = altTitle,
altTitles = setOfNotNull(altTitle?.nullIfEmpty()),
url = url,
publicUrl = publicUrl,
rating = rating,
isNsfw = isNsfw,
coverUrl = coverUrl,
contentRating = if (isNsfw) ContentRating.ADULT else null,
coverUrl = coverUrl?.nullIfEmpty(),
tags = tags,
state = state,
author = author,
largeCoverUrl = largeCoverUrl,
description = description,
authors = setOfNotNull(author),
largeCoverUrl = largeCoverUrl?.nullIfEmpty(),
description = description?.nullIfEmpty(),
chapters = chapters,
source = source
source = source,
)
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (javaClass != other?.javaClass) return false
/**
* Author of the manga, may be null
*/
@Deprecated("Please use authors")
public val author: String?
get() = authors.firstOrNull()
/**
* Alternative title (for example on other language), may be null
*/
@Deprecated("Please use altTitles")
public val altTitle: String?
get() = altTitles.firstOrNull()
other as Manga
/**
* Return if manga has a specified rating
* @see rating
*/
public val hasRating: Boolean
get() = rating > 0f && rating <= 1f
if (id != other.id) return false
if (title != other.title) return false
if (altTitle != other.altTitle) return false
if (url != other.url) return false
if (publicUrl != other.publicUrl) return false
if (rating != other.rating) return false
if (isNsfw != other.isNsfw) return false
if (coverUrl != other.coverUrl) return false
if (tags != other.tags) return false
if (state != other.state) return false
if (author != other.author) return false
if (largeCoverUrl != other.largeCoverUrl) return false
if (description != other.description) return false
if (chapters != other.chapters) return false
if (source != other.source) return false
@Deprecated("Use contentRating instead", ReplaceWith("contentRating == ContentRating.ADULT"))
public val isNsfw: Boolean
get() = contentRating == ContentRating.ADULT
return true
public fun getChapters(branch: String?): List<MangaChapter> {
return chapters?.filter { x -> x.branch == branch }.orEmpty()
}
override fun hashCode(): Int {
var result = id.hashCode()
result = 31 * result + title.hashCode()
result = 31 * result + (altTitle?.hashCode() ?: 0)
result = 31 * result + url.hashCode()
result = 31 * result + publicUrl.hashCode()
result = 31 * result + rating.hashCode()
result = 31 * result + isNsfw.hashCode()
result = 31 * result + coverUrl.hashCode()
result = 31 * result + tags.hashCode()
result = 31 * result + (state?.hashCode() ?: 0)
result = 31 * result + (author?.hashCode() ?: 0)
result = 31 * result + (largeCoverUrl?.hashCode() ?: 0)
result = 31 * result + (description?.hashCode() ?: 0)
result = 31 * result + (chapters?.hashCode() ?: 0)
result = 31 * result + source.hashCode()
return result
}
public fun findChapterById(id: Long): MangaChapter? = chapters?.findById(id)
public fun requireChapterById(id: Long): MangaChapter = findChapterById(id)
?: throw NoSuchElementException("Chapter with id $id not found")
override fun toString(): String {
return "Manga($id - \"$title\" [$url] - $source)"
public fun getBranches(): Map<String?, Int> {
if (chapters.isNullOrEmpty()) {
return emptyMap()
}
val result = ArrayMap<String?, Int>()
chapters.forEach {
val key = it.branch
result[key] = result.getOrDefault(key, 0) + 1
}
return result
}
}

@ -1,110 +1,65 @@
package org.koitharu.kotatsu.parsers.model
class MangaChapter(
import org.koitharu.kotatsu.parsers.util.formatSimple
import org.koitharu.kotatsu.parsers.util.ifNullOrEmpty
public data class MangaChapter(
/**
* An unique id of chapter
*/
@JvmField val id: Long,
@JvmField public val id: Long,
/**
* User-readable name of chapter
* User-readable name of chapter if provided by parser or null instead
* Do not pass manga title or chapter number here
*/
@JvmField val name: String,
@JvmField public val title: String?,
/**
* Chapter number starting from 1, 0 if unknown
*/
@JvmField val number: Float,
@JvmField public val number: Float,
/**
* Volume number starting from 1, 0 if unknown
*/
@JvmField val volume: Int,
@JvmField public val volume: Int,
/**
* Relative url to chapter (**without** a domain) or any other uri.
* Used principally in parsers
*/
@JvmField val url: String,
@JvmField public val url: String,
/**
* User-readable name of scanlator (releaser) or null if unknown
*/
@JvmField val scanlator: String?,
@JvmField public val scanlator: String?,
/**
* Chapter upload date in milliseconds
*/
@JvmField val uploadDate: Long,
@JvmField public val uploadDate: Long,
/**
* User-readable name of branch.
* A branch is a group of chapters that overlap (e.g. different languages)
*/
@JvmField val branch: String?,
@JvmField val source: MangaSource,
@JvmField public val branch: String?,
@JvmField public val source: MangaSource,
) {
@Deprecated(message = "Consider using constructor with volume value")
constructor(
id: Long,
name: String,
number: Int,
url: String,
scanlator: String?,
uploadDate: Long,
branch: String?,
source: MangaSource,
) : this(
id = id,
name = name,
number = number.toFloat(),
volume = 0,
url = url,
scanlator = scanlator,
uploadDate = uploadDate,
branch = branch,
source = source,
)
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (javaClass != other?.javaClass) return false
other as MangaChapter
if (id != other.id) return false
if (name != other.name) return false
if (number != other.number) return false
if (volume != other.volume) return false
if (url != other.url) return false
if (scanlator != other.scanlator) return false
if (uploadDate != other.uploadDate) return false
if (branch != other.branch) return false
if (source != other.source) return false
@Deprecated("Use title instead", ReplaceWith("title"))
val name: String
get() = title.ifNullOrEmpty {
buildString {
if (volume > 0) append("Vol ").append(volume).append(' ')
if (number > 0) append("Chapter ").append(number.formatSimple()) else append("Unnamed")
}
}
return true
public fun numberString(): String? = if (number > 0f) {
number.formatSimple()
} else {
null
}
override fun hashCode(): Int {
var result = id.hashCode()
result = 31 * result + name.hashCode()
result = 31 * result + number.hashCode()
result = 31 * result + volume
result = 31 * result + url.hashCode()
result = 31 * result + (scanlator?.hashCode() ?: 0)
result = 31 * result + uploadDate.hashCode()
result = 31 * result + (branch?.hashCode() ?: 0)
result = 31 * result + source.hashCode()
return result
public fun volumeString(): String? = if (volume > 0) {
volume.toString()
} else {
null
}
override fun toString(): String {
return "MangaChapter($id - #$number [$url] - $source)"
}
internal fun copy(volume: Int, number: Float) = MangaChapter(
id = id,
name = name,
number = number,
volume = volume,
url = url,
scanlator = scanlator,
uploadDate = uploadDate,
branch = branch,
source = source,
)
}

@ -1,93 +1,88 @@
package org.koitharu.kotatsu.parsers.model
import org.koitharu.kotatsu.parsers.MangaParser
import java.util.*
sealed interface MangaListFilter {
fun isEmpty(): Boolean
val sortOrder: SortOrder?
fun isValid(parser: MangaParser): Boolean = when (this) {
is Advanced -> (sortOrder in parser.availableSortOrders) &&
(tags.size <= 1 || parser.isMultipleTagsSupported) &&
(tagsExclude.isEmpty() || parser.isTagsExclusionSupported) &&
(contentRating.isEmpty() || parser.availableContentRating.containsAll(contentRating)) &&
(states.isEmpty() || parser.availableStates.containsAll(states))
is Search -> parser.isSearchSupported
}
data class Search(
@JvmField val query: String,
) : MangaListFilter {
override val sortOrder: SortOrder? = null
override fun isEmpty() = query.isBlank()
public data class MangaListFilter(
@JvmField val query: String? = null,
@JvmField val tags: Set<MangaTag> = emptySet(),
@JvmField val tagsExclude: Set<MangaTag> = emptySet(),
@JvmField val locale: Locale? = null,
@JvmField val originalLocale: Locale? = null,
@JvmField val states: Set<MangaState> = emptySet(),
@JvmField val contentRating: Set<ContentRating> = emptySet(),
@JvmField val types: Set<ContentType> = emptySet(),
@JvmField val demographics: Set<Demographic> = emptySet(),
@JvmField val year: Int = YEAR_UNKNOWN,
@JvmField val yearFrom: Int = YEAR_UNKNOWN,
@JvmField val yearTo: Int = YEAR_UNKNOWN,
@JvmField val author: String? = null,
) {
private fun isNonSearchOptionsEmpty(): Boolean = tags.isEmpty() &&
tagsExclude.isEmpty() &&
locale == null &&
originalLocale == null &&
states.isEmpty() &&
contentRating.isEmpty() &&
year == YEAR_UNKNOWN &&
yearFrom == YEAR_UNKNOWN &&
yearTo == YEAR_UNKNOWN &&
types.isEmpty() &&
demographics.isEmpty() &&
author.isNullOrEmpty()
public fun isEmpty(): Boolean = isNonSearchOptionsEmpty() && query.isNullOrEmpty()
public fun isNotEmpty(): Boolean = !isEmpty()
public fun hasNonSearchOptions(): Boolean = !isNonSearchOptionsEmpty()
public companion object {
@JvmStatic
public val EMPTY: MangaListFilter = MangaListFilter()
}
data class Advanced(
override val sortOrder: SortOrder,
@JvmField val tags: Set<MangaTag>,
@JvmField val tagsExclude: Set<MangaTag>,
@JvmField val locale: Locale?,
@JvmField val states: Set<MangaState>,
@JvmField val contentRating: Set<ContentRating>,
) : MangaListFilter {
override fun isEmpty(): Boolean =
tags.isEmpty() && tagsExclude.isEmpty() && locale == null && states.isEmpty() && contentRating.isEmpty()
fun newBuilder() = Builder(sortOrder)
.tags(tags)
.tagsExclude(tagsExclude)
.locale(locale)
.states(states)
.contentRatings(contentRating)
class Builder(sortOrder: SortOrder) {
private var _sortOrder: SortOrder = sortOrder
private var _tags: Set<MangaTag>? = null
private var _tagsExclude: Set<MangaTag>? = null
private var _locale: Locale? = null
private var _states: Set<MangaState>? = null
private var _contentRating: Set<ContentRating>? = null
fun sortOrder(order: SortOrder) = apply {
_sortOrder = order
}
fun tags(tags: Set<MangaTag>?) = apply {
_tags = tags
}
fun tagsExclude(tags: Set<MangaTag>?) = apply {
_tagsExclude = tags
}
fun locale(locale: Locale?) = apply {
_locale = locale
}
fun states(states: Set<MangaState>?) = apply {
_states = states
}
fun contentRatings(rating: Set<ContentRating>?) = apply {
_contentRating = rating
}
fun build() = Advanced(
sortOrder = _sortOrder,
tags = _tags.orEmpty(),
tagsExclude = _tagsExclude.orEmpty(),
locale = _locale,
states = _states.orEmpty(),
contentRating = _contentRating.orEmpty(),
)
}
internal class Builder {
private var query: String? = null
private val tags: MutableSet<MangaTag> = mutableSetOf()
private val tagsExclude: MutableSet<MangaTag> = mutableSetOf()
private var locale: Locale? = null
private var originalLocale: Locale? = null
private val states: MutableSet<MangaState> = mutableSetOf()
private val contentRating: MutableSet<ContentRating> = mutableSetOf()
private val types: MutableSet<ContentType> = mutableSetOf()
private val demographics: MutableSet<Demographic> = mutableSetOf()
private var year: Int = YEAR_UNKNOWN
private var yearFrom: Int = YEAR_UNKNOWN
private var yearTo: Int = YEAR_UNKNOWN
fun query(query: String?): Builder = apply { this.query = query }
fun addTag(tag: MangaTag): Builder = apply { tags.add(tag) }
fun addTags(tags: Collection<MangaTag>): Builder = apply { this.tags.addAll(tags) }
fun excludeTag(tag: MangaTag): Builder = apply { tagsExclude.add(tag) }
fun excludeTags(tags: Collection<MangaTag>): Builder = apply { this.tagsExclude.addAll(tags) }
fun locale(locale: Locale?): Builder = apply { this.locale = locale }
fun originalLocale(locale: Locale?): Builder = apply { this.originalLocale = locale }
fun addState(state: MangaState): Builder = apply { states.add(state) }
fun addStates(states: Collection<MangaState>): Builder = apply { this.states.addAll(states) }
fun addContentRating(rating: ContentRating): Builder = apply { contentRating.add(rating) }
fun addContentRatings(ratings: Collection<ContentRating>): Builder =
apply { this.contentRating.addAll(ratings) }
fun addType(type: ContentType): Builder = apply { types.add(type) }
fun addTypes(types: Collection<ContentType>): Builder = apply { this.types.addAll(types) }
fun addDemographic(demographic: Demographic): Builder = apply { demographics.add(demographic) }
fun addDemographics(demographics: Collection<Demographic>): Builder =
apply { this.demographics.addAll(demographics) }
fun year(year: Int): Builder = apply { this.year = year }
fun yearFrom(year: Int): Builder = apply { this.yearFrom = year }
fun yearTo(year: Int): Builder = apply { this.yearTo = year }
fun build(): MangaListFilter = MangaListFilter(
query, tags, tagsExclude, locale, originalLocale, states,
contentRating, types, demographics, year, yearFrom, yearTo,
)
}
}

@ -0,0 +1,56 @@
package org.koitharu.kotatsu.parsers.model
import org.koitharu.kotatsu.parsers.InternalParsersApi
public data class MangaListFilterCapabilities @InternalParsersApi constructor(
/**
* Whether parser supports filtering by more than one tag
* @see [MangaListFilter.tags]
* @see [MangaListFilterOptions.availableTags]
*/
val isMultipleTagsSupported: Boolean = false,
/**
* Whether parser supports tagsExclude field in filter
* @see [MangaListFilter.tagsExclude]
* @see [MangaListFilterOptions.availableTags]
*/
val isTagsExclusionSupported: Boolean = false,
/**
* Whether parser supports searching by string query
* @see [MangaListFilter.query]
*/
val isSearchSupported: Boolean = false,
/**
* Whether parser supports searching by string query combined within other filters
*/
val isSearchWithFiltersSupported: Boolean = false,
/**
* Whether parser supports searching/filtering by year
* @see [MangaListFilter.year]
*/
val isYearSupported: Boolean = false,
/**
* Whether parser supports searching by year range
* @see [MangaListFilter.yearFrom] and [MangaListFilter.yearTo]
*/
val isYearRangeSupported: Boolean = false,
/**
* Whether parser supports searching Original Languages
* @see [MangaListFilter.originalLocale]
* @see [MangaListFilterOptions.availableLocales]
*/
val isOriginalLocaleSupported: Boolean = false,
/**
* Whether parser supports searching by author name
* @see [MangaListFilter.author]
*/
val isAuthorSearchSupported: Boolean = false,
)

@ -0,0 +1,45 @@
package org.koitharu.kotatsu.parsers.model
import org.koitharu.kotatsu.parsers.InternalParsersApi
import java.util.*
public data class MangaListFilterOptions @InternalParsersApi constructor(
/**
* Available tags (genres)
*/
public val availableTags: Set<MangaTag> = emptySet(),
/**
* Supported [MangaState] variants for filtering. May be empty.
*
* For better performance use [EnumSet] for more than one item.
*/
public val availableStates: Set<MangaState> = emptySet(),
/**
* Supported [ContentRating] variants for filtering. May be empty.
*
* For better performance use [EnumSet] for more than one item.
*/
public val availableContentRating: Set<ContentRating> = emptySet(),
/**
* Supported [ContentType] variants for filtering. May be empty.
*
* For better performance use [EnumSet] for more than one item.
*/
public val availableContentTypes: Set<ContentType> = emptySet(),
/**
* Supported [Demographic] variants for filtering. May be empty.
*
* For better performance use [EnumSet] for more than one item.
*/
public val availableDemographics: Set<Demographic> = emptySet(),
/**
* Supported content locales for multilingual sources
*/
public val availableLocales: Set<Locale> = emptySet(),
)

@ -2,46 +2,21 @@ package org.koitharu.kotatsu.parsers.model
import org.koitharu.kotatsu.parsers.MangaParser
class MangaPage(
public data class MangaPage(
/**
* Unique identifier for manga
* Unique identifier for page
*/
@JvmField val id: Long,
@JvmField public val id: Long,
/**
* Relative url to page (**without** a domain) or any other uri.
* Used principally in parsers.
* May contain link to image or html page.
* @see MangaParser.getPageUrl
*/
@JvmField val url: String,
@JvmField public val url: String,
/**
* Absolute url of the small page image if exists, null otherwise
*/
@JvmField val preview: String?,
@JvmField val source: MangaSource,
) {
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (javaClass != other?.javaClass) return false
other as MangaPage
if (id != other.id) return false
if (url != other.url) return false
if (preview != other.preview) return false
return source == other.source
}
override fun hashCode(): Int {
var result = id.hashCode()
result = 31 * result + url.hashCode()
result = 31 * result + (preview?.hashCode() ?: 0)
result = 31 * result + source.hashCode()
return result
}
override fun toString(): String {
return "MangaPage($id [$url] - $source)"
}
}
@JvmField public val preview: String?,
@JvmField public val source: MangaSource,
)

@ -0,0 +1,6 @@
package org.koitharu.kotatsu.parsers.model
public interface MangaSource {
public val name: String
}

@ -1,5 +1,5 @@
package org.koitharu.kotatsu.parsers.model
enum class MangaState {
ONGOING, FINISHED, ABANDONED, PAUSED, UPCOMING
public enum class MangaState {
ONGOING, FINISHED, ABANDONED, PAUSED, UPCOMING, RESTRICTED
}

@ -2,40 +2,15 @@ package org.koitharu.kotatsu.parsers.model
import org.koitharu.kotatsu.parsers.MangaParser
class MangaTag(
public data class MangaTag(
/**
* User-readable tag title, should be in Title case
*/
@JvmField val title: String,
@JvmField public val title: String,
/**
* Identifier of a tag, must be unique among the source.
* @see MangaParser.getList
*/
@JvmField val key: String,
@JvmField val source: MangaSource,
) {
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (javaClass != other?.javaClass) return false
other as MangaTag
if (title != other.title) return false
if (key != other.key) return false
if (source != other.source) return false
return true
}
override fun hashCode(): Int {
var result = title.hashCode()
result = 31 * result + key.hashCode()
result = 31 * result + source.hashCode()
return result
}
override fun toString(): String {
return "MangaTag($key \"$title\" - $source)"
}
}
@JvmField public val key: String,
@JvmField public val source: MangaSource,
)

@ -1,10 +1,22 @@
package org.koitharu.kotatsu.parsers.model
enum class SortOrder {
public enum class SortOrder {
UPDATED,
UPDATED_ASC,
POPULARITY,
POPULARITY_ASC,
RATING,
RATING_ASC,
NEWEST,
NEWEST_ASC,
ALPHABETICAL,
ALPHABETICAL_DESC
ALPHABETICAL_DESC,
ADDED,
ADDED_ASC,
RELEVANCE,
POPULARITY_HOUR,
POPULARITY_TODAY,
POPULARITY_WEEK,
POPULARITY_MONTH,
POPULARITY_YEAR,
}

@ -3,9 +3,9 @@ package org.koitharu.kotatsu.parsers.model
import org.koitharu.kotatsu.parsers.InternalParsersApi
@InternalParsersApi
class WordSet(private vararg val words: String) {
public class WordSet(private vararg val words: String) {
fun anyWordIn(dateString: String): Boolean = words.any {
dateString.contains(it, ignoreCase = true)
}
}
public fun anyWordIn(dateString: String): Boolean = words.any { dateString.contains(it, ignoreCase = true) }
public fun startsWith(dateString: String): Boolean = words.any { dateString.startsWith(it, ignoreCase = true) }
public fun endsWith(dateString: String): Boolean = words.any { dateString.endsWith(it, ignoreCase = true) }
}

@ -0,0 +1,90 @@
package org.koitharu.kotatsu.parsers.model.search
import androidx.collection.ArrayMap
import androidx.collection.ArraySet
import org.koitharu.kotatsu.parsers.model.SortOrder
/**
* Represents a search query for filtering and sorting manga search results.
* This class is immutable and must be constructed using the [Builder].
*
* @property criteria The set of search criteria applied to the query.
* @property order The sorting order for the results (optional).
* @property offset The offset number for paginated search results (optional).
*/
@Deprecated("Too complex. Use MangaListFilter instead")
@ConsistentCopyVisibility
public data class MangaSearchQuery private constructor(
@JvmField public val criteria: Set<QueryCriteria<*>>,
@JvmField public val order: SortOrder?,
@JvmField public val offset: Int,
@JvmField public val skipValidation: Boolean,
) {
public fun newBuilder(): Builder = Builder(this)
public class Builder {
private val criteria = ArraySet<QueryCriteria<*>>()
private var order: SortOrder? = null
private var offset: Int = 0
private var skipValidation: Boolean = false
public constructor()
public constructor(query: MangaSearchQuery) : this() {
criteria.addAll(query.criteria)
order = query.order
offset = query.offset
}
public fun criterion(criterion: QueryCriteria<*>): Builder = apply { criteria.add(criterion) }
public fun order(order: SortOrder?): Builder = apply { this.order = order }
public fun offset(offset: Int): Builder = apply { this.offset = offset }
public fun skipValidation(skip: Boolean): Builder = apply { this.skipValidation = skip }
@Throws(IllegalArgumentException::class)
public fun build(): MangaSearchQuery {
return MangaSearchQuery(deduplicateCriteria(criteria), order, offset, skipValidation)
}
private fun deduplicateCriteria(criteria: Set<QueryCriteria<*>>): Set<QueryCriteria<*>> {
val uniqueCriteria =
ArrayMap<Pair<SearchableField, Class<out QueryCriteria<*>>>, QueryCriteria<*>>(criteria.size)
for (criterion in criteria) {
val key = criterion.field to criterion::class.java
val existing = uniqueCriteria[key]
when {
existing == null -> uniqueCriteria[key] = criterion
existing is QueryCriteria.Include<*> && criterion is QueryCriteria.Include<*> -> {
uniqueCriteria[key] =
QueryCriteria.Include(criterion.field, existing.values union criterion.values)
}
existing is QueryCriteria.Exclude<*> && criterion is QueryCriteria.Exclude<*> -> {
uniqueCriteria[key] =
QueryCriteria.Exclude(criterion.field, existing.values union criterion.values)
}
else -> throw IllegalArgumentException(
"Match and Range have only one criterion per type, but found duplicates for: ${criterion.field} in ${criterion::class.simpleName}",
)
}
}
return uniqueCriteria.values.toSet()
}
}
public companion object {
public val EMPTY: MangaSearchQuery = MangaSearchQuery(emptySet(), null, 0, false)
}
}

@ -0,0 +1,48 @@
package org.koitharu.kotatsu.parsers.model.search
import androidx.collection.ArraySet
import org.koitharu.kotatsu.parsers.model.search.QueryCriteria.*
import org.koitharu.kotatsu.parsers.util.mapToSet
@Deprecated("Too complex. Use MangaListFilterCapabilities instead")
@ExposedCopyVisibility
public data class MangaSearchQueryCapabilities internal constructor(
public val capabilities: Set<SearchCapability>,
) {
public constructor(vararg capabilities: SearchCapability) : this(ArraySet(capabilities))
internal fun validate(query: MangaSearchQuery) {
val strictFields = capabilities.filter { it.isExclusive }.mapToSet { it.field }
val usedStrictFields = query.criteria.mapToSet { it.field }.intersect(strictFields)
require(usedStrictFields.isEmpty() || query.criteria.size <= 1) {
"Query contains multiple criteria, but at least one field (${usedStrictFields.joinToString()}) does not support multiple criteria."
}
for (criterion in query.criteria) {
val capability = requireNotNull(capabilities.find { it.field == criterion.field }) {
"Unsupported search field: ${criterion.field}"
}
require(criterion::class in capability.criteriaTypes) {
"Unsupported search criterion: ${criterion::class.simpleName} for field ${criterion.field}"
}
// Ensure single value per criterion if supportMultiValue is false
if (!capability.isMultiple) {
when (criterion) {
is Include<*> -> require(criterion.values.size <= 1) {
"Multiple values are not allowed for field ${criterion.field}"
}
is Exclude<*> -> require(criterion.values.size <= 1) {
"Multiple values are not allowed for field ${criterion.field}"
}
is Range<*> -> Unit // Range is always valid (from, to)
is Match<*> -> Unit // Match always has a single value
}
}
}
}
}

@ -0,0 +1,106 @@
package org.koitharu.kotatsu.parsers.model.search
/**
* Represents a generic search criterion used for filtering manga search results.
* Each criterion applies a specific condition to a [SearchableField] and operates on values of type [T].
*
* @param T The type of value associated with the search criterion.
* @property field The field to which this search criterion applies.
*/
@Deprecated("Too complex")
public sealed interface QueryCriteria<T> {
public val field: SearchableField
override fun equals(other: Any?): Boolean
override fun hashCode(): Int
/**
* Represents an inclusion criterion that allows search results based on a set of allowed values.
*
* @param T The type of value being included in the search.
* @property values The set of values that should be included in the search results.
*
* ### Example Usage:
* ```kotlin
* val genreFilter = QueryCriteria.Include(SearchableField.STATE, setOf(MangaState.ONGOING, MangaState.FINISHED))
* ```
*/
public data class Include<T : Any>(
public override val field: SearchableField,
@JvmField public val values: Set<T>,
) : QueryCriteria<T> {
init {
check(values.all { x -> field.type.isInstance(x) })
}
}
/**
* Represents an exclusion criterion that exclude results containing certain values.
*
* @param T The type of value being excluded from the search.
* @property values The set of values that should be excluded from the search results.
*
* ### Example Usage:
* ```kotlin
* val excludeTag = QueryCriteria.Exclude(SearchableField.TAG, setOf(MangaTag(key, title, source)))
* ```
*/
public data class Exclude<T : Any>(
public override val field: SearchableField,
@JvmField public val values: Set<T>,
) : QueryCriteria<T> {
init {
check(values.all { x -> field.type.isInstance(x) })
}
}
/**
* Represents a range criterion that allows search based on a range of values.
*
* @param T The type of value used in the range (must be comparable).
* @property from The starting value of the range (inclusive).
* @property to The ending value of the range (inclusive).
*
* ### Example Usage:
* ```kotlin
* val yearRange = QueryCriteria.Range(SearchableField.PUBLICATION_YEAR, 2000, 2020)
* ```
*/
public data class Range<T : Comparable<T>>(
public override val field: SearchableField,
@JvmField public val from: T,
@JvmField public val to: T,
) : QueryCriteria<T> {
init {
check(field.type.isInstance(from))
check(field.type.isInstance(to))
}
}
/**
* Represents a match criterion that search results based on an exact match of a value.
*
* @param T The type of value being matched.
* @property value The exact value that must be matched.
*
* ### Example Usage:
* ```kotlin
* val titleMatch = QueryCriteria.Match(SearchableField.TITLE, "manga title")
* ```
*/
public data class Match<T : Any>(
public override val field: SearchableField,
@JvmField public val value: T,
) : QueryCriteria<T> {
init {
check(field.type.isInstance(value))
}
}
}

@ -0,0 +1,34 @@
package org.koitharu.kotatsu.parsers.model.search
import kotlin.reflect.KClass
/**
* Defines the search capabilities of a given field in the manga search query.
*
* @property field The searchable field that this capability applies to.
* Example values:
* - `SearchableField.TITLE_NAME` for searching by title.
* - `SearchableField.AUTHOR` for searching by author names.
* - `SearchableField.TAG` for filtering by tags.
* @property criteriaTypes The set of supported criteria types for the field.
* Example values:
* - `setOf(Include::class, Exclude::class)` selected field supports inclusion/exclusion criteria.
* - `setOf(Range::class)` selected field support numerical range criteria.
* @property isMultiValue Indicates whether the field supports multiple values.
* - `true` if multiple values can be provided (e.g., multiple tags or authors).
* - `false` if only a single value is allowed (e.g., only one tag or author).
* @property isExclusive Specifies whether the field can be used alongside other criteria.
* - `true` if this field can be used with other search criteria.
* - `false` if using this field requires it to be the only criterion in query.
*/
@Deprecated("Too complex")
public data class SearchCapability(
/** The searchable field that this capability applies to. */
@JvmField public val field: SearchableField,
/** The set of supported criteria types for this field. */
@JvmField public val criteriaTypes: Set<KClass<out QueryCriteria<*>>>,
/** Indicates whether the field supports multiple values. */
@JvmField public val isMultiple: Boolean,
/** Specifies whether the field can be used alongside other criteria. */
@JvmField public val isExclusive: Boolean = false,
)

@ -0,0 +1,24 @@
package org.koitharu.kotatsu.parsers.model.search
import org.koitharu.kotatsu.parsers.model.*
import java.util.*
/**
* Represents the various fields that can be used for searching manga.
* Each field is associated with a specific data type that defines its expected values.
*
* @property type The Java class representing the expected type of values for this field.
*/
@Deprecated("Too complex")
public enum class SearchableField(public val type: Class<*>) {
TITLE_NAME(String::class.java),
TAG(MangaTag::class.java),
AUTHOR(MangaTag::class.java),
LANGUAGE(Locale::class.java),
ORIGINAL_LANGUAGE(Locale::class.java),
STATE(MangaState::class.java),
CONTENT_TYPE(ContentType::class.java),
CONTENT_RATING(ContentRating::class.java),
DEMOGRAPHIC(Demographic::class.java),
PUBLICATION_YEAR(Int::class.javaObjectType);
}

@ -0,0 +1,47 @@
package org.koitharu.kotatsu.parsers.network
import okhttp3.CookieJar
import okhttp3.HttpUrl.Companion.toHttpUrl
import okhttp3.Response
import org.jsoup.Jsoup
import java.net.HttpURLConnection.HTTP_FORBIDDEN
import java.net.HttpURLConnection.HTTP_UNAVAILABLE
public object CloudFlareHelper {
public const val PROTECTION_NOT_DETECTED: Int = 0
public const val PROTECTION_CAPTCHA: Int = 1
public const val PROTECTION_BLOCKED: Int = 2
private const val CF_CLEARANCE = "cf_clearance"
public fun checkResponseForProtection(response: Response): Int {
if (response.code != HTTP_FORBIDDEN && response.code != HTTP_UNAVAILABLE) {
return PROTECTION_NOT_DETECTED
}
val content = try {
response.peekBody(Long.MAX_VALUE).use {
Jsoup.parse(it.byteStream(), Charsets.UTF_8.name(), response.request.url.toString())
}
} catch (_: IllegalStateException) {
return PROTECTION_NOT_DETECTED
}
return when {
content.selectFirst("h2[data-translate=\"blocked_why_headline\"]") != null -> PROTECTION_BLOCKED
content.getElementById("challenge-error-title") != null || content.getElementById("challenge-error-text") != null -> PROTECTION_CAPTCHA
else -> PROTECTION_NOT_DETECTED
}
}
public fun getClearanceCookie(cookieJar: CookieJar, url: String): String? {
return cookieJar.loadForRequest(url.toHttpUrl()).find { it.name == CF_CLEARANCE }?.value
}
public fun isCloudFlareCookie(name: String): Boolean {
return name.startsWith("cf_")
|| name.startsWith("_cf")
|| name.startsWith("__cf")
|| name == "csrftoken"
}
}

@ -13,7 +13,7 @@ import org.koitharu.kotatsu.parsers.util.await
import org.koitharu.kotatsu.parsers.util.parseJson
import java.net.HttpURLConnection
class OkHttpWebClient(
public class OkHttpWebClient(
private val httpClient: OkHttpClient,
private val mangaSource: MangaSource,
) : WebClient {

@ -1,14 +1,17 @@
package org.koitharu.kotatsu.parsers.network
object UserAgents {
public object UserAgents {
const val CHROME_MOBILE =
public const val CHROME_MOBILE: String =
"Mozilla/5.0 (Linux; Android 13) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.5735.196 Mobile Safari/537.36"
const val CHROME_DESKTOP =
public const val FIREFOX_MOBILE: String =
"Mozilla/5.0 (Android 14; Mobile; LG-M255; rv:123.0) Gecko/123.0 Firefox/123.0"
public const val CHROME_DESKTOP: String =
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36"
const val FIREFOX_DESKTOP = "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/116.0"
public const val FIREFOX_DESKTOP: String = "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/116.0"
const val KOTATSU = "Kotatsu/5.3 (Android 13;;; en)"
public const val KOTATSU: String = "Kotatsu/6.8 (Android 13;;; en)"
}

@ -6,54 +6,55 @@ import okhttp3.HttpUrl.Companion.toHttpUrl
import okhttp3.Response
import org.json.JSONObject
interface WebClient {
public interface WebClient {
/**
* Do a GET http request to specific url
* @param url
*/
suspend fun httpGet(url: String): Response = httpGet(url.toHttpUrl())
public suspend fun httpGet(url: String): Response = httpGet(url.toHttpUrl())
suspend fun httpGet(url: String, extraHeaders: Headers?): Response = httpGet(url.toHttpUrl(), extraHeaders)
public suspend fun httpGet(url: String, extraHeaders: Headers?): Response = httpGet(url.toHttpUrl(), extraHeaders)
/**
* Do a GET http request to specific url
* @param url
*/
suspend fun httpGet(url: HttpUrl): Response = httpGet(url, null)
public suspend fun httpGet(url: HttpUrl): Response = httpGet(url, null)
/**
* Do a GET http request to specific url
* @param url
* @param extraHeaders additional HTTP headers for request
*/
suspend fun httpGet(url: HttpUrl, extraHeaders: Headers?): Response
public suspend fun httpGet(url: HttpUrl, extraHeaders: Headers?): Response
/**
* Do a HEAD http request to specific url
* @param url
*/
suspend fun httpHead(url: String): Response = httpHead(url.toHttpUrl())
public suspend fun httpHead(url: String): Response = httpHead(url.toHttpUrl())
/**
* Do a HEAD http request to specific url
* @param url
*/
suspend fun httpHead(url: HttpUrl): Response
public suspend fun httpHead(url: HttpUrl): Response
/**
* Do a POST http request to specific url with `multipart/form-data` payload
* @param url
* @param form payload as key=>value map
*/
suspend fun httpPost(url: String, form: Map<String, String>): Response = httpPost(url.toHttpUrl(), form, null)
public suspend fun httpPost(url: String, form: Map<String, String>): Response =
httpPost(url.toHttpUrl(), form, null)
/**
* Do a POST http request to specific url with `multipart/form-data` payload
* @param url
* @param form payload as key=>value map
*/
suspend fun httpPost(url: HttpUrl, form: Map<String, String>): Response = httpPost(url, form, null)
public suspend fun httpPost(url: HttpUrl, form: Map<String, String>): Response = httpPost(url, form, null)
/**
* Do a POST http request to specific url with `multipart/form-data` payload
@ -61,21 +62,21 @@ interface WebClient {
* @param form payload as key=>value map
* @param extraHeaders additional HTTP headers for request
*/
suspend fun httpPost(url: HttpUrl, form: Map<String, String>, extraHeaders: Headers?): Response
public suspend fun httpPost(url: HttpUrl, form: Map<String, String>, extraHeaders: Headers?): Response
/**
* Do a POST http request to specific url with `multipart/form-data` payload
* @param url
* @param payload payload as `key=value` string with `&` separator
*/
suspend fun httpPost(url: String, payload: String): Response = httpPost(url.toHttpUrl(), payload, null)
public suspend fun httpPost(url: String, payload: String): Response = httpPost(url.toHttpUrl(), payload, null)
/**
* Do a POST http request to specific url with `multipart/form-data` payload
* @param url
* @param payload payload as `key=value` string with `&` separator
*/
suspend fun httpPost(url: HttpUrl, payload: String): Response = httpPost(url, payload, null)
public suspend fun httpPost(url: HttpUrl, payload: String): Response = httpPost(url, payload, null)
/**
* Do a POST http request to specific url with `multipart/form-data` payload
@ -83,21 +84,21 @@ interface WebClient {
* @param payload payload as `key=value` string with `&` separator
* @param extraHeaders additional HTTP headers for request
*/
suspend fun httpPost(url: HttpUrl, payload: String, extraHeaders: Headers?): Response
public suspend fun httpPost(url: HttpUrl, payload: String, extraHeaders: Headers?): Response
/**
* Do a POST http request to specific url with json payload
* @param url
* @param body
*/
suspend fun httpPost(url: String, body: JSONObject): Response = httpPost(url.toHttpUrl(), body, null)
public suspend fun httpPost(url: String, body: JSONObject): Response = httpPost(url.toHttpUrl(), body, null)
/**
* Do a POST http request to specific url with json payload
* @param url
* @param body
*/
suspend fun httpPost(url: HttpUrl, body: JSONObject): Response = httpPost(url, body, null)
public suspend fun httpPost(url: HttpUrl, body: JSONObject): Response = httpPost(url, body, null)
/**
* Do a POST http request to specific url with json payload
@ -105,12 +106,12 @@ interface WebClient {
* @param body
* @param extraHeaders additional HTTP headers for request
*/
suspend fun httpPost(url: HttpUrl, body: JSONObject, extraHeaders: Headers?): Response
public suspend fun httpPost(url: HttpUrl, body: JSONObject, extraHeaders: Headers?): Response
/**
* Do a GraphQL request to specific url
* @param endpoint an url
* @param query GraphQL request payload
*/
suspend fun graphQLQuery(endpoint: String, query: String): JSONObject
public suspend fun graphQLQuery(endpoint: String, query: String): JSONObject
}

@ -5,13 +5,13 @@ import org.json.JSONArray
import org.json.JSONObject
import org.jsoup.nodes.Element
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaParserAuthProvider
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.exception.ParseException
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import org.koitharu.kotatsu.parsers.util.json.isNullOrEmpty
import java.nio.charset.StandardCharsets
import java.security.MessageDigest
import java.util.*
@ -22,23 +22,77 @@ import javax.crypto.spec.SecretKeySpec
@MangaSourceParser("BATOTO", "Bato.To")
internal class BatoToParser(context: MangaLoaderContext) : PagedMangaParser(
context = context,
source = MangaSource.BATOTO,
source = MangaParserSource.BATOTO,
pageSize = 60,
searchPageSize = 20,
) {
), MangaParserAuthProvider {
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override val authUrl: String
get() = "https://${domain}/signin"
override suspend fun isAuthorized(): Boolean {
return context.cookieJar.getCookies(domain).any {
it.name.contains("skey")
}
}
override suspend fun getUsername(): String {
val body = webClient.httpGet("https://${domain}/account/profiles").parseHtml().body()
return body.selectFirst("ul.toggleMenu-content:has(.avatar):has(a) div.text-center div")?.text()
?: body.parseFailed("Cannot find username")
}
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.NEWEST,
SortOrder.ALPHABETICAL,
SortOrder.UPDATED,
SortOrder.NEWEST,
SortOrder.POPULARITY,
SortOrder.ALPHABETICAL,
SortOrder.POPULARITY_YEAR,
SortOrder.POPULARITY_MONTH,
SortOrder.POPULARITY_WEEK,
SortOrder.POPULARITY_TODAY,
SortOrder.POPULARITY_HOUR,
)
override val availableStates: Set<MangaState> = EnumSet.allOf(MangaState::class.java)
override val isTagsExclusionSupported: Boolean = true
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isTagsExclusionSupported = true,
isSearchSupported = true,
isOriginalLocaleSupported = true,
)
override val availableContentRating: Set<ContentRating> = EnumSet.of(ContentRating.SAFE)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
availableStates = EnumSet.of(
MangaState.ONGOING,
MangaState.FINISHED,
MangaState.ABANDONED,
MangaState.PAUSED,
MangaState.UPCOMING,
),
availableContentRating = EnumSet.of(ContentRating.SAFE),
availableLocales = setOf(
Locale.CHINESE, Locale.ENGLISH, Locale.US, Locale.FRENCH, Locale.GERMAN, Locale.ITALIAN, Locale.JAPANESE,
Locale("af"), Locale("ar"), Locale("az"), Locale("eu"), Locale("be"),
Locale("bn"), Locale("bs"), Locale("bg"), Locale("my"), Locale("km"),
Locale("ceb"), Locale("zh_hk"), Locale("zh_tw"), Locale("hr"), Locale("cs"),
Locale("da"), Locale("nl"), Locale("eo"), Locale("et"), Locale("fil"),
Locale("fi"), Locale("ka"), Locale("el"), Locale("ht"), Locale("he"),
Locale("hi"), Locale("hu"), Locale("id"), Locale("kk"), Locale("ko"),
Locale("lv"), Locale("ms"), Locale("ml"), Locale("mo"), Locale("mn"),
Locale("ne"), Locale("no"), Locale("fa"), Locale("pl"), Locale("pt"),
Locale("pt_br"), Locale("pt_pt"), Locale("ro"), Locale("ru"), Locale("sr"),
Locale("si"), Locale("sk"), Locale("es"), Locale("es_419"), Locale("ta"),
Locale("te"), Locale("th"), Locale("ti"), Locale("tr"), Locale("uk"),
Locale("vi"), Locale("zu"),
),
)
override val configKeyDomain = ConfigKey.Domain(
"bato.to",
@ -66,27 +120,33 @@ internal class BatoToParser(context: MangaLoaderContext) : PagedMangaParser(
"zbato.com",
"zbato.net",
"zbato.org",
"fto.to",
"jto.to",
)
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
when (filter) {
is MangaListFilter.Search -> {
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
when {
!filter.query.isNullOrEmpty() -> {
return search(page, filter.query)
}
is MangaListFilter.Advanced -> {
else -> {
val url = buildString {
append("https://")
append(domain)
append("/browse?sort=")
when (filter.sortOrder) {
when (order) {
SortOrder.UPDATED -> append("update.za")
SortOrder.POPULARITY -> append("views_a.za")
SortOrder.NEWEST -> append("create.za")
SortOrder.ALPHABETICAL -> append("title.az")
SortOrder.POPULARITY_YEAR -> append("views_y.za")
SortOrder.POPULARITY_MONTH -> append("views_m.za")
SortOrder.POPULARITY_WEEK -> append("views_w.za")
SortOrder.POPULARITY_TODAY -> append("views_d.za")
SortOrder.POPULARITY_HOUR -> append("views_h.za")
else -> append("update.za")
}
@ -99,23 +159,38 @@ internal class BatoToParser(context: MangaLoaderContext) : PagedMangaParser(
MangaState.ABANDONED -> "cancelled"
MangaState.PAUSED -> "hiatus"
MangaState.UPCOMING -> "pending"
else -> throw IllegalArgumentException("$it not supported")
},
)
}
filter.locale?.let {
append("&langs=")
append(it.language)
if (it.language == "in") {
append("id")
} else {
append(it.language)
}
}
filter.originalLocale?.let {
append("&origs=")
if (it.language == "in") {
append("id")
} else {
append(it.language)
}
}
append("&genres=")
if (filter.tags.isNotEmpty()) {
appendAll(filter.tags, ",") { it.key }
filter.tags.joinTo(this, ",") { it.key }
}
append("|")
if (filter.tagsExclude.isNotEmpty()) {
appendAll(filter.tagsExclude, ",") { it.key }
filter.tagsExclude.joinTo(this, ",") { it.key }
}
if (filter.contentRating.isNotEmpty()) {
@ -135,17 +210,6 @@ internal class BatoToParser(context: MangaLoaderContext) : PagedMangaParser(
return parseList(url, page)
}
null -> {
val url = buildString {
append("https://")
append(domain)
append("/browse?sort=update.za")
append("&page=")
append(page.toString())
}
return parseList(url, page)
}
}
}
@ -154,11 +218,16 @@ internal class BatoToParser(context: MangaLoaderContext) : PagedMangaParser(
.requireElementById("mainer")
val details = root.selectFirstOrThrow(".detail-set")
val attrs = details.selectFirst(".attr-main")?.select(".attr-item")?.associate {
it.child(0).text().trim() to it.child(1)
it.child(0).text() to it.child(1)
}.orEmpty()
val author = attrs["Authors:"]?.textOrNull()
return manga.copy(
title = root.selectFirst("h3.item-title")?.text() ?: manga.title,
isNsfw = !root.selectFirst("alert")?.getElementsContainingOwnText("NSFW").isNullOrEmpty(),
contentRating = if (root.selectFirst("alert")?.getElementsContainingOwnText("NSFW").isNullOrEmpty()) {
ContentRating.ADULT
} else {
ContentRating.SAFE
},
largeCoverUrl = details.selectFirst("img[src]")?.absUrl("src"),
description = details.getElementById("limit-height-body-summary")
?.selectFirst(".limit-html")
@ -171,7 +240,7 @@ internal class BatoToParser(context: MangaLoaderContext) : PagedMangaParser(
"Hiatus" -> MangaState.PAUSED
else -> manga.state
},
author = attrs["Authors:"]?.text()?.trim() ?: manga.author,
authors = author?.let { setOf(it) } ?: manga.authors,
chapters = root.selectFirst(".episode-list")
?.selectFirst(".main")
?.children()
@ -216,7 +285,7 @@ internal class BatoToParser(context: MangaLoaderContext) : PagedMangaParser(
throw ParseException("Cannot find images list", fullUrl)
}
override suspend fun getAvailableTags(): Set<MangaTag> {
private suspend fun fetchAvailableTags(): Set<MangaTag> {
val scripts = webClient.httpGet(
"https://${domain}/browse",
).parseHtml().selectOrThrow("script")
@ -237,22 +306,6 @@ internal class BatoToParser(context: MangaLoaderContext) : PagedMangaParser(
throw ParseException("Cannot find gernes list", scripts[0].baseUri())
}
override suspend fun getAvailableLocales(): Set<Locale> = setOf(
Locale.CHINESE, Locale.ENGLISH, Locale.US, Locale.FRENCH, Locale.GERMAN, Locale.ITALIAN, Locale.JAPANESE,
Locale("af"), Locale("ar"), Locale("az"), Locale("eu"), Locale("be"),
Locale("bn"), Locale("bs"), Locale("bg"), Locale("my"), Locale("km"),
Locale("ceb"), Locale("zh_hk"), Locale("zh_tw"), Locale("hr"), Locale("cs"),
Locale("da"), Locale("nl"), Locale("eo"), Locale("et"), Locale("fil"),
Locale("fi"), Locale("ka"), Locale("el"), Locale("ht"), Locale("he"),
Locale("hi"), Locale("hu"), Locale("id"), Locale("kk"), Locale("ko"),
Locale("lv"), Locale("ms"), Locale("ml"), Locale("mo"), Locale("mn"),
Locale("ne"), Locale("no"), Locale("fa"), Locale("pl"), Locale("pt"),
Locale("pt_br"), Locale("pt_pt"), Locale("ro"), Locale("ru"), Locale("sr"),
Locale("si"), Locale("sk"), Locale("es"), Locale("es_419"), Locale("ta"),
Locale("te"), Locale("th"), Locale("ti"), Locale("tr"), Locale("uk"),
Locale("vi"), Locale("zu"),
)
private suspend fun search(page: Int, query: String): List<Manga> {
val url = buildString {
append("https://")
@ -287,17 +340,17 @@ internal class BatoToParser(context: MangaLoaderContext) : PagedMangaParser(
Manga(
id = generateUid(href),
title = title,
altTitle = div.selectFirst(".item-alias")?.text()?.takeUnless { it == title },
altTitles = setOfNotNull(div.selectFirst(".item-alias")?.textOrNull()?.takeUnless { it == title }),
url = href,
publicUrl = a.absUrl("href"),
rating = RATING_UNKNOWN,
isNsfw = false,
coverUrl = div.selectFirst("img[src]")?.absUrl("src").orEmpty(),
contentRating = null,
coverUrl = div.selectFirst("img[src]")?.absUrl("src"),
largeCoverUrl = null,
description = null,
tags = div.selectFirst(".item-genre")?.parseTags().orEmpty(),
state = null,
author = null,
authors = emptySet(),
source = source,
)
}
@ -318,8 +371,9 @@ internal class BatoToParser(context: MangaLoaderContext) : PagedMangaParser(
val href = a.attrAsRelativeUrl("href")
return MangaChapter(
id = generateUid(href),
name = a.text(),
number = index + 1,
title = a.textOrNull(),
number = index + 1f,
volume = 0,
url = href,
scanlator = extra?.getElementsByAttributeValueContaining("href", "/group/")?.text(),
uploadDate = runCatching {

@ -2,41 +2,68 @@ package org.koitharu.kotatsu.parsers.site.all
import androidx.collection.ArraySet
import androidx.collection.SparseArrayCompat
import okhttp3.HttpUrl
import org.json.JSONArray
import org.json.JSONObject
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import org.koitharu.kotatsu.parsers.util.json.*
import org.koitharu.kotatsu.parsers.util.suspendlazy.suspendLazy
import java.text.SimpleDateFormat
import java.util.*
/**
* cc
*/
private const val CHAPTERS_LIMIT = 99999
@MangaSourceParser("COMICK_FUN", "ComicK")
internal class ComickFunParser(context: MangaLoaderContext) : PagedMangaParser(context, MangaSource.COMICK_FUN, 20) {
internal class ComickFunParser(context: MangaLoaderContext) :
PagedMangaParser(context, MangaParserSource.COMICK_FUN, 20) {
override val configKeyDomain = ConfigKey.Domain("comick.io")
override val configKeyDomain = ConfigKey.Domain("comick.cc")
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.POPULARITY,
SortOrder.UPDATED,
SortOrder.RATING,
SortOrder.NEWEST,
)
override val availableStates: Set<MangaState> =
EnumSet.of(MangaState.ONGOING, MangaState.FINISHED, MangaState.PAUSED, MangaState.ABANDONED)
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isTagsExclusionSupported = true,
isSearchSupported = true,
isSearchWithFiltersSupported = true,
isYearRangeSupported = true,
)
private val tagsArray = SuspendLazy(::loadTags)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
availableStates = EnumSet.of(MangaState.ONGOING, MangaState.FINISHED, MangaState.PAUSED, MangaState.ABANDONED),
availableContentTypes = EnumSet.of(
ContentType.MANGA,
ContentType.MANHWA,
ContentType.MANHUA,
ContentType.OTHER,
),
availableDemographics = EnumSet.of(
Demographic.SHOUNEN,
Demographic.SHOUJO,
Demographic.SEINEN,
Demographic.JOSEI,
Demographic.NONE,
),
)
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val domain = domain
val url = urlBuilder()
.host("api.$domain")
@ -46,41 +73,78 @@ internal class ComickFunParser(context: MangaLoaderContext) : PagedMangaParser(c
.addQueryParameter("tachiyomi", "true")
.addQueryParameter("limit", pageSize.toString())
.addQueryParameter("page", page.toString())
when (filter) {
is MangaListFilter.Search -> {
url.addQueryParameter("q", filter.query)
}
null -> {
url.addQueryParameter("sort", "view")
}
filter.query?.let {
url.addQueryParameter("q", filter.query)
}
is MangaListFilter.Advanced -> {
filter.tags.forEach { tag ->
url.addQueryParameter("genres", tag.key)
}
url.addQueryParameter(
"sort",
when (filter.sortOrder) {
SortOrder.POPULARITY -> "view"
SortOrder.RATING -> "rating"
else -> "uploaded"
},
)
filter.states.oneOrThrowIfMany()?.let {
url.addQueryParameter(
"status",
when (it) {
MangaState.ONGOING -> "1"
MangaState.FINISHED -> "2"
MangaState.ABANDONED -> "3"
MangaState.PAUSED -> "4"
else -> ""
},
)
}
}
filter.tags.forEach {
url.addQueryParameter("genres", it.key)
}
filter.tagsExclude.forEach {
url.addQueryParameter("excludes", it.key)
}
url.addQueryParameter(
"sort",
when (order) {
SortOrder.NEWEST -> "created_at"
SortOrder.POPULARITY -> "view"
SortOrder.RATING -> "rating"
SortOrder.UPDATED -> "uploaded"
else -> "uploaded"
},
)
filter.states.oneOrThrowIfMany()?.let {
url.addQueryParameter(
"status",
when (it) {
MangaState.ONGOING -> "1"
MangaState.FINISHED -> "2"
MangaState.ABANDONED -> "3"
MangaState.PAUSED -> "4"
else -> ""
},
)
}
if (filter.yearFrom != YEAR_UNKNOWN) {
url.addQueryParameter("from", filter.yearFrom.toString())
}
if (filter.yearTo != YEAR_UNKNOWN) {
url.addQueryParameter("to", filter.yearTo.toString())
}
filter.types.forEach {
url.addQueryParameter(
"country",
when (it) {
ContentType.MANGA -> "jp"
ContentType.MANHWA -> "kr"
ContentType.MANHUA -> "cn"
ContentType.OTHER -> "others"
else -> ""
},
)
}
filter.demographics.forEach {
url.addQueryParameter(
"demographic",
when (it) {
Demographic.SHOUNEN -> "1"
Demographic.SHOUJO -> "2"
Demographic.SEINEN -> "3"
Demographic.JOSEI -> "4"
Demographic.NONE -> "5"
else -> ""
},
)
}
val ja = webClient.httpGet(url.build()).parseJsonArray()
val tagsMap = tagsArray.get()
return ja.mapJSON { jo ->
@ -88,12 +152,12 @@ internal class ComickFunParser(context: MangaLoaderContext) : PagedMangaParser(c
Manga(
id = generateUid(slug),
title = jo.getString("title"),
altTitle = null,
altTitles = emptySet(),
url = slug,
publicUrl = "https://$domain/comic/$slug",
rating = jo.getDoubleOrDefault("rating", -10.0).toFloat() / 10f,
isNsfw = false,
coverUrl = jo.getString("cover_url"),
contentRating = null,
coverUrl = jo.getStringOrNull("cover_url"),
largeCoverUrl = null,
description = jo.getStringOrNull("desc"),
tags = jo.selectGenres(tagsMap),
@ -104,7 +168,7 @@ internal class ComickFunParser(context: MangaLoaderContext) : PagedMangaParser(c
4 -> MangaState.PAUSED
else -> null
},
author = null,
authors = emptySet(),
source = source,
)
}
@ -115,11 +179,17 @@ internal class ComickFunParser(context: MangaLoaderContext) : PagedMangaParser(c
val url = "https://api.$domain/comic/${manga.url}?tachiyomi=true"
val jo = webClient.httpGet(url).parseJson()
val comic = jo.getJSONObject("comic")
var alt = ""
comic.getJSONArray("md_titles").mapJSON { alt += it.getString("title") + " - " }
val alt = comic.getJSONArray("md_titles").asTypedList<JSONObject>().mapNotNullToSet {
it.getStringOrNull("title")
}
val authors = jo.getJSONArray("artists").mapJSONNotNullToSet { it.getStringOrNull("name") }
return manga.copy(
altTitle = alt.ifEmpty { comic.getStringOrNull("title") },
isNsfw = jo.getBoolean("matureContent") || comic.getBoolean("hentai"),
altTitles = alt,
contentRating = when {
comic.getBooleanOrDefault("hentai", false) -> ContentRating.ADULT
jo.getBooleanOrDefault("matureContent", false) -> ContentRating.SUGGESTIVE
else -> ContentRating.SAFE
},
description = comic.getStringOrNull("parsed") ?: comic.getStringOrNull("desc"),
tags = manga.tags + comic.getJSONArray("md_comic_md_genres").mapJSONToSet {
val g = it.getJSONObject("md_genres")
@ -129,11 +199,44 @@ internal class ComickFunParser(context: MangaLoaderContext) : PagedMangaParser(c
source = source,
)
},
author = jo.getJSONArray("artists").optJSONObject(0)?.getString("name"),
authors = authors,
chapters = getChapters(comic.getString("hid")),
)
}
private suspend fun getChapters(hid: String): List<MangaChapter> {
val ja = webClient.httpGet(
url = "https://api.${domain}/comic/$hid/chapters?limit=$CHAPTERS_LIMIT",
).parseJson().getJSONArray("chapters")
val dateFormat = SimpleDateFormat("yyyy-MM-dd")
return ja.asTypedList<JSONObject>().reversed().mapChapters { _, jo ->
val vol = jo.getIntOrDefault("vol", 0)
val chap = jo.getFloatOrDefault("chap", 0f)
val locale = Locale.forLanguageTag(jo.getString("lang"))
val group = jo.optJSONArray("group_name")?.joinToString(", ")
val branch = buildString {
append(locale.getDisplayName(locale).toTitleCase(locale))
if (!group.isNullOrEmpty()) {
append(" (")
append(group)
append(')')
}
}
MangaChapter(
id = generateUid(jo.getLong("id")),
title = jo.getStringOrNull("title"),
number = chap,
volume = vol,
url = jo.getString("hid"),
scanlator = jo.optJSONArray("group_name")?.asTypedList<String>()?.joinToString()
?.takeUnless { it.isBlank() },
uploadDate = dateFormat.parseSafe(jo.getString("created_at").substringBefore('T')),
branch = branch,
source = source,
)
}
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val jo = webClient.httpGet(
"https://api.${domain}/chapter/${chapter.url}?tachiyomi=true",
@ -149,7 +252,14 @@ internal class ComickFunParser(context: MangaLoaderContext) : PagedMangaParser(c
}
}
override suspend fun getAvailableTags(): Set<MangaTag> {
override suspend fun resolveLink(resolver: LinkResolver, link: HttpUrl): Manga? {
val slug = link.pathSegments.lastOrNull() ?: return null
return resolver.resolveManga(this, url = slug, id = generateUid(slug))
}
private val tagsArray = suspendLazy(initializer = ::loadTags)
private suspend fun fetchAvailableTags(): Set<MangaTag> {
val sparseArray = tagsArray.get()
val set = ArraySet<MangaTag>(sparseArray.size())
for (i in 0 until sparseArray.size()) {
@ -161,7 +271,7 @@ internal class ComickFunParser(context: MangaLoaderContext) : PagedMangaParser(c
private suspend fun loadTags(): SparseArrayCompat<MangaTag> {
val ja = webClient.httpGet("https://api.${domain}/genre").parseJsonArray()
val tags = SparseArrayCompat<MangaTag>(ja.length())
for (jo in ja.JSONIterator()) {
for (jo in ja.asTypedList<JSONObject>()) {
tags.append(
jo.getInt("id"),
MangaTag(
@ -174,77 +284,6 @@ internal class ComickFunParser(context: MangaLoaderContext) : PagedMangaParser(c
return tags
}
private suspend fun getChapters(hid: String): List<MangaChapter> {
val ja = webClient.httpGet(
url = "https://api.${domain}/comic/$hid/chapters?limit=$CHAPTERS_LIMIT",
).parseJson().getJSONArray("chapters")
val dateFormat = SimpleDateFormat("yyyy-MM-dd")
val counters = HashMap<String?, Int>()
return ja.toJSONList().reversed().mapChapters { _, jo ->
val vol = jo.getStringOrNull("vol")
val chap = jo.getStringOrNull("chap")
val locale = Locale.forLanguageTag(jo.getString("lang"))
val group = jo.optJSONArray("group_name")?.joinToString(", ")
val branch = buildString {
append(locale.getDisplayName(locale).toTitleCase(locale))
if (!group.isNullOrEmpty()) {
append(" (")
append(group)
append(')')
}
}
MangaChapter(
id = generateUid(jo.getLong("id")),
name = buildString {
vol?.let { append("Vol ").append(it).append(' ') }
chap?.let { append("Chap ").append(it) }
jo.getStringOrNull("title")?.let { append(": ").append(it) }
},
number = counters.incrementAndGet(branch),
url = jo.getString("hid"),
scanlator = jo.optJSONArray("group_name")?.asIterable<String>()?.joinToString()
?.takeUnless { it.isBlank() },
uploadDate = dateFormat.tryParse(jo.getString("created_at").substringBefore('T')),
branch = branch,
source = source,
)
}
/*val chaptersBuilder = ChaptersListBuilder(list.size)
val branchedChapters = HashMap<String?, HashMap<Pair<String?, String?>, MangaChapter>>()
for (jo in list) {
val vol = jo.getStringOrNull("vol")
val chap = jo.getStringOrNull("chap")
val volChap = vol to chap
val locale = Locale.forLanguageTag(jo.getString("lang"))
val lc = locale.getDisplayName(locale).toTitleCase(locale)
val branch = (list.indices).firstNotNullOf { i ->
val b = if (i == 0) lc else "$lc ($i)"
if (branchedChapters[b]?.get(volChap) == null) b else null
}
val chapter = MangaChapter(
id = generateUid(jo.getLong("id")),
name = buildString {
vol?.let { append("Vol ").append(it).append(' ') }
chap?.let { append("Chap ").append(it) }
jo.getStringOrNull("title")?.let { append(": ").append(it) }
},
number = branchedChapters[branch]?.size?.plus(1) ?: 1,
url = jo.getString("hid"),
scanlator = jo.optJSONArray("group_name")?.asIterable<String>()?.joinToString()
?.takeUnless { it.isBlank() },
uploadDate = dateFormat.tryParse(jo.getString("created_at").substringBefore('T')),
branch = branch,
source = source,
)
if (chaptersBuilder.add(chapter)) {
branchedChapters.getOrPut(branch, ::HashMap)[volChap] = chapter
}
}
return chaptersBuilder.toList()*/
}
private fun JSONObject.selectGenres(tags: SparseArrayCompat<MangaTag>): Set<MangaTag> {
val array = optJSONArray("genres") ?: return emptySet()
val res = ArraySet<MangaTag>(array.length())

@ -1,420 +1,497 @@
package org.koitharu.kotatsu.parsers.site.all
import androidx.collection.ArrayMap
import androidx.collection.ArraySet
import androidx.collection.SparseArrayCompat
import androidx.collection.set
import androidx.collection.MutableIntLongMap
import androidx.collection.MutableIntObjectMap
import okhttp3.HttpUrl.Companion.toHttpUrlOrNull
import okhttp3.Interceptor
import okhttp3.Response
import okhttp3.internal.closeQuietly
import org.jsoup.internal.StringUtil
import org.jsoup.nodes.Element
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaParserAuthProvider
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.bitmap.Rect
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.exception.AuthRequiredException
import org.koitharu.kotatsu.parsers.exception.TooManyRequestExceptions
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import java.text.SimpleDateFormat
import java.util.*
import kotlin.math.pow
import java.util.Collections.emptyList
import java.util.concurrent.TimeUnit
private const val DOMAIN_UNAUTHORIZED = "e-hentai.org"
private const val DOMAIN_AUTHORIZED = "exhentai.org"
private val TAG_PREFIXES = arrayOf("male:", "female:", "other:")
private const val BANNED_RESPONSE_LENGTH = 256L
@MangaSourceParser("EXHENTAI", "ExHentai", type = ContentType.HENTAI)
internal class ExHentaiParser(
context: MangaLoaderContext,
) : PagedMangaParser(context, MangaSource.EXHENTAI, pageSize = 25), MangaParserAuthProvider {
override val availableSortOrders: Set<SortOrder> = setOf(SortOrder.NEWEST)
override val isTagsExclusionSupported: Boolean = true
override val configKeyDomain: ConfigKey.Domain
get() = ConfigKey.Domain(
if (isAuthorized) DOMAIN_AUTHORIZED else DOMAIN_UNAUTHORIZED,
if (isAuthorized) DOMAIN_UNAUTHORIZED else DOMAIN_AUTHORIZED,
)
override val authUrl: String
get() = "https://${domain}/bounce_login.php"
private val ratingPattern = Regex("-?[0-9]+px")
private val authCookies = arrayOf("ipb_member_id", "ipb_pass_hash")
private var updateDm = false
private val nextPages = SparseArrayCompat<Long>()
private val suspiciousContentKey = ConfigKey.ShowSuspiciousContent(false)
private val tagsMap = SuspendLazy(::fetchTags)
override val isAuthorized: Boolean
get() {
val authorized = isAuthorized(DOMAIN_UNAUTHORIZED)
if (authorized) {
if (!isAuthorized(DOMAIN_AUTHORIZED)) {
context.cookieJar.copyCookies(
DOMAIN_UNAUTHORIZED,
DOMAIN_AUTHORIZED,
authCookies,
)
context.cookieJar.insertCookies(DOMAIN_AUTHORIZED, "yay=louder")
}
return true
}
return false
}
init {
context.cookieJar.insertCookies(DOMAIN_AUTHORIZED, "nw=1", "sl=dm_2")
context.cookieJar.insertCookies(DOMAIN_UNAUTHORIZED, "nw=1", "sl=dm_2")
paginator.firstPage = 0
}
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
val next = nextPages.get(page, 0L)
if (page > 0 && next == 0L) {
assert(false) { "Page timestamp not found" }
return emptyList()
}
var search = ""
val url = buildString {
append("https://")
append(domain)
append("/?next=")
append(next)
when (filter) {
is MangaListFilter.Search -> {
search += filter.query.urlEncoded()
append("&f_search=")
append(search.trim().replace(' ', '+'))
}
is MangaListFilter.Advanced -> {
filter.toSearchQuery()?.let { sq ->
append("&f_search=")
append(sq.urlEncoded())
}
val catsOn = filter.tags.mapNotNullToSet { it.key.toIntOrNull() }
val catsOff = filter.tagsExclude.mapNotNullToSet { it.key.toIntOrNull() }
if (catsOff.size >= 10) {
return emptyList()
}
var fCats = catsOn.fold(0, Int::or)
if (fCats != 0) {
fCats = 1023 - fCats
}
fCats = catsOff.fold(fCats, Int::or)
if (fCats != 0) {
append("&f_cats=")
append(fCats)
}
}
null -> {}
}
// by unknown reason cookie "sl=dm_2" is ignored, so, we should request it again
if (updateDm) {
append("&inline_set=dm_e")
}
append("&advsearch=1")
if (config[suspiciousContentKey]) {
append("&f_sh=on")
}
}
val body = webClient.httpGet(url).parseHtml().body()
val root = body.selectFirst("table.itg")
?.selectFirst("tbody")
?: if (updateDm) {
body.parseFailed("Cannot find root")
} else {
updateDm = true
return getListPage(page, filter)
}
updateDm = false
nextPages[page + 1] = getNextTimestamp(body)
return root.children().mapNotNull { tr ->
if (tr.childrenSize() != 2) return@mapNotNull null
val (td1, td2) = tr.children()
val gLink = td2.selectFirstOrThrow("div.glink")
val a = gLink.parents().select("a").first() ?: gLink.parseFailed("link not found")
val href = a.attrAsRelativeUrl("href")
val tagsDiv = gLink.nextElementSibling() ?: gLink.parseFailed("tags div not found")
val mainTag = td2.selectFirst("div.cn")?.let { div ->
MangaTag(
title = div.text().toTitleCase(),
key = tagIdByClass(div.classNames()) ?: return@let null,
source = source,
)
}
Manga(
id = generateUid(href),
title = gLink.text().cleanupTitle(),
altTitle = null,
url = href,
publicUrl = a.absUrl("href"),
rating = td2.selectFirst("div.ir")?.parseRating() ?: RATING_UNKNOWN,
isNsfw = true,
coverUrl = td1.selectFirst("img")?.absUrl("src").orEmpty(),
tags = setOfNotNull(mainTag),
state = null,
author = tagsDiv.getElementsContainingOwnText("artist:").first()
?.nextElementSibling()?.text(),
source = source,
)
}
}
override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val root = doc.body().selectFirstOrThrow("div.gm")
val cover = root.getElementById("gd1")?.children()?.first()
val title = root.getElementById("gd2")
val tagList = root.getElementById("taglist")
val tabs = doc.body().selectFirst("table.ptt")?.selectFirst("tr")
val lang = root.getElementById("gd3")
?.selectFirst("tr:contains(Language)")
?.selectFirst(".gdt2")?.ownTextOrNull()
val tagMap = tagsMap.get()
val tags = ArraySet<MangaTag>()
tagList?.selectFirst("tr:contains(female:)")?.select("a")?.mapNotNullTo(tags) { tagMap[it.text()] }
tagList?.selectFirst("tr:contains(male:)")?.select("a")?.mapNotNullTo(tags) { tagMap[it.text()] }
return manga.copy(
title = title?.getElementById("gn")?.text()?.cleanupTitle() ?: manga.title,
altTitle = title?.getElementById("gj")?.text()?.cleanupTitle() ?: manga.altTitle,
publicUrl = doc.baseUri().ifEmpty { manga.publicUrl },
rating = root.getElementById("rating_label")?.text()
?.substringAfterLast(' ')
?.toFloatOrNull()
?.div(5f) ?: manga.rating,
largeCoverUrl = cover?.styleValueOrNull("background")?.cssUrl(),
tags = tags,
description = tagList?.select("tr")?.joinToString("<br>") { tr ->
val (tc, td) = tr.children()
val subTags = td.select("a").joinToString { it.html() }
"<b>${tc.html()}</b> $subTags"
},
chapters = tabs?.select("a")?.findLast { a ->
a.text().toIntOrNull() != null
}?.let { a ->
val count = a.text().toInt()
val chapters = ChaptersListBuilder(count)
for (i in 1..count) {
val url = "${manga.url}?p=${i - 1}"
chapters += MangaChapter(
id = generateUid(url),
name = "${manga.title} #$i",
number = i,
url = url,
uploadDate = 0L,
source = source,
scanlator = null,
branch = lang,
)
}
chapters.toList()
},
)
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()
val root = doc.body().requireElementById("gdt")
return root.select("a").map { a ->
val url = a.attrAsRelativeUrl("href")
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
}
}
override suspend fun getPageUrl(page: MangaPage): String {
val doc = webClient.httpGet(page.url.toAbsoluteUrl(domain)).parseHtml()
return doc.body().requireElementById("img").attrAsAbsoluteUrl("src")
}
private val tags =
"ahegao,anal,angel,apron,bandages,bbw,bdsm,beauty mark,big areolae,big ass,big breasts,big clit,big lips," +
"big nipples,bikini,blackmail,bloomers,blowjob,bodysuit,bondage,breast expansion,bukkake,bunny girl,business suit," +
"catgirl,centaur,cheating,chinese dress,christmas,collar,corset,cosplaying,cowgirl,crossdressing,cunnilingus," +
"dark skin,daughter,deepthroat,defloration,demon girl,double penetration,dougi,dragon,drunk,elf,exhibitionism,farting," +
"females only,femdom,filming,fingering,fishnets,footjob,fox girl,furry,futanari,garter belt,ghost,giantess," +
"glasses,gloves,goblin,gothic lolita,growth,guro,gyaru,hair buns,hairy,hairy armpits,handjob,harem,hidden sex," +
"horns,huge breasts,humiliation,impregnation,incest,inverted nipples,kemonomimi,kimono,kissing,lactation," +
"latex,leg lock,leotard,lingerie,lizard girl,maid,masked face,masturbation,midget,miko,milf,mind break," +
"mind control,monster girl,mother,muscle,nakadashi,netorare,nose hook,nun,nurse,oil,paizuri,panda girl," +
"pantyhose,piercing,pixie cut,policewoman,ponytail,pregnant,rape,rimjob,robot,scat,lolicon,schoolgirl uniform," +
"sex toys,shemale,sister,small breasts,smell,sole dickgirl,sole female,squirting,stockings,sundress,sweating," +
"swimsuit,swinging,tail,tall girl,teacher,tentacles,thigh high boots,tomboy,transformation,twins,twintails," +
"unusual pupils,urination,vore,vtuber,widow,wings,witch,wolf girl,x-ray,yuri,zombie,sole male,males only,yaoi," +
"tomgirl,tall man,oni,shotacon,prostate massage,policeman,males only,huge penis,fox boy,feminization,dog boy,dickgirl on male,big penis"
override suspend fun getAvailableTags(): Set<MangaTag> {
return tagsMap.get().values.toSet()
}
private suspend fun fetchTags(): Map<String, MangaTag> {
val tagMap = ArrayMap<String, MangaTag>()
val tagElements = tags.split(",")
for (el in tagElements) {
if (el.isEmpty()) continue
tagMap[el] = MangaTag(
title = el.toTitleCase(Locale.ENGLISH),
key = el,
source = source,
)
}
val doc = webClient.httpGet("https://${domain}").parseHtml()
val root = doc.body().requireElementById("searchbox").selectFirstOrThrow("table")
root.select("div.cs").mapNotNullToSet { div ->
val id = div.id().substringAfterLast('_').toIntOrNull() ?: return@mapNotNullToSet null
val name = div.text().toTitleCase(Locale.ENGLISH)
tagMap[name] = MangaTag(
title = "Kind: $name",
key = id.toString(),
source = source,
)
}
return tagMap
}
override suspend fun getAvailableLocales(): Set<Locale> = setOf(
Locale.JAPANESE,
Locale.ENGLISH,
Locale.CHINESE,
Locale("nl"),
Locale.FRENCH,
Locale.GERMAN,
Locale("hu"),
Locale.ITALIAN,
Locale("kr"),
Locale("pl"),
Locale("pt"),
Locale("ru"),
Locale("es"),
Locale("th"),
Locale("vi"),
)
private fun Locale.toLanguagePath() = when (language) {
else -> getDisplayLanguage(Locale.ENGLISH).lowercase()
}
override suspend fun getUsername(): String {
val doc = webClient.httpGet("https://forums.$DOMAIN_UNAUTHORIZED/").parseHtml().body()
val username = doc.getElementById("userlinks")
?.getElementsByAttributeValueContaining("href", "showuser=")
?.firstOrNull()
?.ownText()
?: if (doc.getElementById("userlinksguest") != null) {
throw AuthRequiredException(source)
} else {
doc.parseFailed()
}
return username
}
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(suspiciousContentKey)
}
private fun isAuthorized(domain: String): Boolean {
val cookies = context.cookieJar.getCookies(domain).mapToSet { x -> x.name }
return authCookies.all { it in cookies }
}
private fun Element.parseRating(): Float {
return runCatching {
val style = requireNotNull(attr("style"))
val (v1, v2) = ratingPattern.find(style)!!.destructured
var p1 = v1.dropLast(2).toInt()
val p2 = v2.dropLast(2).toInt()
if (p2 != -1) {
p1 += 8
}
(80 - p1) / 80f
}.getOrDefault(RATING_UNKNOWN)
}
private fun String.cleanupTitle(): String {
val result = StringBuilder(length)
var skip = false
for (c in this) {
when {
c == '[' -> skip = true
c == ']' -> skip = false
c.isWhitespace() && result.isEmpty() -> continue
!skip -> result.append(c)
}
}
while (result.lastOrNull()?.isWhitespace() == true) {
result.deleteCharAt(result.lastIndex)
}
return result.toString()
}
private fun String.cssUrl(): String? {
val fromIndex = indexOf("url(")
if (fromIndex == -1) {
return null
}
val toIndex = indexOf(')', startIndex = fromIndex)
return if (toIndex == -1) {
null
} else {
substring(fromIndex + 4, toIndex).trim()
}
}
private fun tagIdByClass(classNames: Collection<String>): String? {
val className = classNames.find { x -> x.startsWith("ct") } ?: return null
val num = className.drop(2).toIntOrNull(16) ?: return null
return 2.0.pow(num).toInt().toString()
}
private fun getNextTimestamp(root: Element): Long {
return root.getElementById("unext")
?.attrAsAbsoluteUrlOrNull("href")
?.toHttpUrlOrNull()
?.queryParameter("next")
?.toLongOrNull() ?: 1
}
private fun MangaListFilter.Advanced.toSearchQuery(): String? {
val joiner = StringUtil.StringJoiner(" ")
for (tag in tags) {
if (tag.key.isNumeric()) {
continue
}
joiner.add("tag:\"")
joiner.append(tag.key)
joiner.append("\"$")
}
for (tag in tagsExclude) {
if (tag.key.isNumeric()) {
continue
}
joiner.add("-tag:\"")
joiner.append(tag.key)
joiner.append("\"$")
}
locale?.let { lc ->
joiner.add("language:\"")
joiner.append(lc.toLanguagePath())
joiner.append("\"$")
}
return joiner.complete().takeUnless { it.isEmpty() }
}
context: MangaLoaderContext,
) : PagedMangaParser(context, MangaParserSource.EXHENTAI, pageSize = 25), MangaParserAuthProvider, Interceptor {
// Listing supports only upload order; pagination is cursor-based (see getListPage).
override val availableSortOrders: Set<SortOrder> = setOf(SortOrder.NEWEST)
// Prefer the members-only domain when auth cookies are present, keeping the other as fallback.
override val configKeyDomain: ConfigKey.Domain
get() {
val isAuthorized = checkAuth()
return ConfigKey.Domain(
if (isAuthorized) DOMAIN_AUTHORIZED else DOMAIN_UNAUTHORIZED,
if (isAuthorized) DOMAIN_UNAUTHORIZED else DOMAIN_AUTHORIZED,
)
}
// Login page; completing it sets the cookies listed in authCookies.
override val authUrl: String
get() = "https://${domain}/bounce_login.php"
// Pixel offsets of the rating sprite within an inline style, e.g. "-16px" (see parseRating).
private val ratingPattern = Regex("-?[0-9]+px")
// Strips "[...]" groups and "(C95)"-style convention markers from gallery titles.
private val titleCleanupPattern = Regex("(\\[.*?]|\\([C0-9]*\\))")
// Trims leading/trailing whitespace and collapses repeated inner whitespace.
private val spacesCleanupPattern = Regex("(^\\s+|\\s+\$|\\s+(?=\\s))")
// Cookie names whose presence indicates a logged-in session.
private val authCookies = arrayOf("ipb_member_id", "ipb_pass_hash")
private val suspiciousContentKey = ConfigKey.ShowSuspiciousContent(false)
// Pagination cursor cache: filter.hashCode() -> (page index -> "next" timestamp).
private val nextPages = MutableIntObjectMap<MutableIntLongMap>()
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isTagsExclusionSupported = true,
isSearchSupported = true,
isSearchWithFiltersSupported = true,
isAuthorSearchSupported = true,
)
override suspend fun isAuthorized(): Boolean = checkAuth()
init {
// "nw=1" skips the content-warning interstitial; "sl=dm_2" requests the extended
// display mode the list parser expects. NOTE(review): the site sometimes ignores
// the "sl" cookie — getListPage has an inline_set retry for that case.
context.cookieJar.insertCookies(DOMAIN_AUTHORIZED, "nw=1", "sl=dm_2")
context.cookieJar.insertCookies(DOMAIN_UNAUTHORIZED, "nw=1", "sl=dm_2")
// This site numbers pages from 0, not 1.
paginator.firstPage = 0
searchPaginator.firstPage = 0
}
// Static filter options: the tag list is hard-coded (see mapTags) and the locales
// mirror the languages the site indexes.
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = mapTags(),
availableContentTypes = EnumSet.of(
ContentType.DOUJINSHI,
ContentType.MANGA,
ContentType.ARTIST_CG,
ContentType.GAME_CG,
ContentType.COMICS,
ContentType.IMAGE_SET,
ContentType.OTHER,
),
availableLocales = setOf(
Locale.JAPANESE,
Locale.ENGLISH,
Locale.CHINESE,
Locale("nl"),
Locale.FRENCH,
Locale.GERMAN,
Locale("hu"),
Locale.ITALIAN,
// NOTE(review): "kr" is not an ISO 639-1 code (Korean is "ko"), so
// toLanguagePath() yields "kr" instead of "korean" — confirm intended.
Locale("kr"),
Locale("pl"),
Locale("pt"),
Locale("ru"),
Locale("es"),
Locale("th"),
Locale("vi"),
),
)
// Public entry point: first attempt is made without the display-mode workaround.
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> =
    getListPage(page, order, filter, updateDm = false)
/**
 * Loads one page of the gallery list.
 *
 * Pagination is cursor-based: each response links to the following page through a
 * "next" timestamp, cached per filter in [nextPages] so page N+1 can be requested
 * later. Page 0 always starts from timestamp 0.
 *
 * @param updateDm when true, adds "inline_set=dm_e" to force the extended display
 * mode (retry path used when the expected table layout is missing, see below).
 */
private suspend fun getListPage(
page: Int,
order: SortOrder,
filter: MangaListFilter,
updateDm: Boolean,
): List<Manga> {
// Look up the cursor for this (filter, page); only page 0 may legitimately be 0.
val next = synchronized(nextPages) {
nextPages[filter.hashCode()]?.getOrDefault(page, 0L) ?: 0L
}
if (page > 0 && next == 0L) {
assert(false) { "Page timestamp not found" }
return emptyList()
}
val url = urlBuilder()
url.addEncodedQueryParameter("next", next.toString())
url.addQueryParameter("f_search", filter.toSearchQuery())
val fCats = filter.types.toFCats()
if (fCats != 0) {
// f_cats is an *exclusion* bitmask: 1023 = all categories, minus the selected ones.
url.addEncodedQueryParameter("f_cats", (1023 - fCats).toString())
}
if (updateDm) {
// by unknown reason cookie "sl=dm_2" is ignored, so, we should request it again
url.addQueryParameter("inline_set", "dm_e")
}
url.addQueryParameter("advsearch", "1")
if (config[suspiciousContentKey]) {
// "Show expunged galleries" flag, gated behind a user setting.
url.addQueryParameter("f_sh", "on")
}
val body = webClient.httpGet(url.build()).parseHtml().body()
val root = body.selectFirst("table.itg")?.selectFirst("tbody")
if (root == null) {
// Missing table: either a genuinely empty result, or the site served the compact
// display mode; retry once with updateDm=true before treating it as a failure.
// NOTE(review): the fall-through below relies on parseFailed() never returning
// (i.e. declared as Nothing) so that `root` is smart-cast non-null — confirm.
if (updateDm) {
if (body.getElementsContainingText("No hits found").isNotEmpty()) {
return emptyList()
} else {
body.parseFailed("Cannot find root")
}
} else {
return getListPage(page, order, filter, updateDm = true)
}
}
// Remember the cursor for the next page before parsing rows.
val nextTimestamp = getNextTimestamp(body)
synchronized(nextPages) {
nextPages.getOrPut(filter.hashCode()) {
MutableIntLongMap()
}.put(page + 1, nextTimestamp)
}
return root.children().mapNotNull { tr ->
// Gallery rows have exactly two cells: thumbnail and description.
if (tr.childrenSize() != 2) return@mapNotNull null
val (td1, td2) = tr.children()
val gLink = td2.selectFirstOrThrow("div.glink")
val a = gLink.parents().select("a").first() ?: gLink.parseFailed("link not found")
val href = a.attrAsRelativeUrl("href")
val tagsDiv = gLink.nextElementSibling() ?: gLink.parseFailed("tags div not found")
val rawTitle = gLink.text()
// The artist is rendered as a "artist:" label followed by the name element.
val author = tagsDiv.getElementsContainingOwnText("artist:").first()
?.nextElementSibling()?.textOrNull()
Manga(
id = generateUid(href),
title = rawTitle.cleanupTitle(),
altTitles = emptySet(),
url = href,
publicUrl = a.absUrl("href"),
rating = td2.selectFirst("div.ir")?.parseRating() ?: RATING_UNKNOWN,
contentRating = ContentRating.ADULT,
coverUrl = td1.selectFirst("img")?.attrAsAbsoluteUrlOrNull("src"),
tags = tagsDiv.parseTags(),
state = when {
rawTitle.contains("(ongoing)", ignoreCase = true) -> MangaState.ONGOING
else -> null
},
authors = setOfNotNull(author),
source = source,
)
}
}
/**
 * Fetches full gallery details. The gallery has no real chapters; each *page of
 * thumbnails* (site paginates them, count taken from the "table.ptt" tab strip) is
 * modeled as one chapter with url "<gallery>?p=<index>".
 */
override suspend fun getDetails(manga: Manga): Manga {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val root = doc.body().selectFirstOrThrow("div.gm")
// #gd1 holds the cover (as a CSS background), #gd2 the titles, #gd3 the metadata table.
val cover = root.getElementById("gd1")?.children()?.first()
val title = root.getElementById("gd2")
val tagList = root.getElementById("taglist")
val tabs = doc.body().selectFirst("table.ptt")?.selectFirst("tr")
val gd3 = root.getElementById("gd3")
val lang = gd3
?.selectFirst("tr:contains(Language)")
?.selectFirst(".gdt2")?.ownTextOrNull()
val uploadDate = gd3
?.selectFirst("tr:contains(Posted)")
?.selectFirst(".gdt2")?.ownTextOrNull()
.let { SimpleDateFormat("yyyy-MM-dd HH:mm", sourceLocale).parseSafe(it) }
val uploader = gd3
?.getElementsByAttributeValueContaining("href", "/uploader/")
?.firstOrNull()
?.ownTextOrNull()
val tags = tagList?.parseTags().orEmpty()
return manga.copy(
// #gn is the romanized title, #gj the original (often Japanese) title.
title = title?.getElementById("gn")?.text()?.cleanupTitle() ?: manga.title,
altTitles = setOfNotNull(title?.getElementById("gj")?.text()?.cleanupTitle()?.nullIfEmpty()),
publicUrl = doc.baseUri().ifEmpty { manga.publicUrl },
// Label reads like "Average: 4.53"; normalize the 0..5 value to 0..1.
rating = root.getElementById("rating_label")?.text()
?.substringAfterLast(' ')
?.toFloatOrNull()
?.div(5f) ?: manga.rating,
largeCoverUrl = cover?.styleValueOrNull("background")?.cssUrl(),
tags = manga.tags + tags,
// Render the tag table as an HTML description: "<b>category</b> tag, tag, ...".
description = tagList?.select("tr")?.joinToString("<br>") { tr ->
val (tc, td) = tr.children()
val subTags = td.select("a").joinToString { it.html() }
"<b>${tc.html()}</b> $subTags"
},
// The last numeric tab gives the total number of thumbnail pages.
chapters = tabs?.select("a")?.findLast { a ->
a.text().toIntOrNull() != null
}?.let { a ->
val count = a.text().toInt()
val chapters = ChaptersListBuilder(count)
for (i in 1..count) {
val url = "${manga.url}?p=${i - 1}"
chapters += MangaChapter(
id = generateUid(url),
title = null,
number = i.toFloat(),
volume = 0,
url = url,
uploadDate = uploadDate,
source = source,
scanlator = uploader,
branch = lang,
)
}
chapters.toList()
},
)
}
/**
 * Lists the reader pages of one thumbnail page: every link in the #gdt grid is a page,
 * with its (cropped sprite) thumbnail attached as preview via [extractPreview].
 */
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
    val document = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()
    val grid = document.body().requireElementById("gdt")
    return grid.select("a").map { link ->
        val pageUrl = link.attrAsRelativeUrl("href")
        MangaPage(
            id = generateUid(pageUrl),
            url = pageUrl,
            preview = link.children().firstOrNull()?.extractPreview(),
            source = source,
        )
    }
}
// Resolves the actual image URL: the single-page viewer exposes it via the #img element.
override suspend fun getPageUrl(page: MangaPage): String = webClient
    .httpGet(page.url.toAbsoluteUrl(domain))
    .parseHtml()
    .body()
    .requireElementById("img")
    .attrAsAbsoluteUrl("src")
@Suppress("SpellCheckingInspection")
// Hard-coded, comma-separated list of the site's well-known tags (the site has no
// public tag index endpoint). Consumed by mapTags(). Fix: the original list contained
// "males only" twice; the duplicate has been removed (mapTags() deduplicated it anyway,
// so behavior is unchanged).
private val tags: String
get() = "ahegao,anal,angel,apron,bandages,bbw,bdsm,beauty mark,big areolae,big ass,big breasts,big clit,big lips," +
"big nipples,bikini,blackmail,bloomers,blowjob,bodysuit,bondage,breast expansion,bukkake,bunny girl,business suit," +
"catgirl,centaur,cheating,chinese dress,christmas,collar,corset,cosplaying,cowgirl,crossdressing,cunnilingus," +
"dark skin,daughter,deepthroat,defloration,demon girl,double penetration,dougi,dragon,drunk,elf,exhibitionism,farting," +
"females only,femdom,filming,fingering,fishnets,footjob,fox girl,furry,futanari,garter belt,ghost,giantess," +
"glasses,gloves,goblin,gothic lolita,growth,guro,gyaru,hair buns,hairy,hairy armpits,handjob,harem,hidden sex," +
"horns,huge breasts,humiliation,impregnation,incest,inverted nipples,kemonomimi,kimono,kissing,lactation," +
"latex,leg lock,leotard,lingerie,lizard girl,maid,masked face,masturbation,midget,miko,milf,mind break," +
"mind control,monster girl,mother,muscle,nakadashi,netorare,nose hook,nun,nurse,oil,paizuri,panda girl," +
"pantyhose,piercing,pixie cut,policewoman,ponytail,pregnant,rape,rimjob,robot,scat,lolicon,schoolgirl uniform," +
"sex toys,shemale,sister,small breasts,smell,sole dickgirl,sole female,squirting,stockings,sundress,sweating," +
"swimsuit,swinging,tail,tall girl,teacher,tentacles,thigh high boots,tomboy,transformation,twins,twintails," +
"unusual pupils,urination,vore,vtuber,widow,wings,witch,wolf girl,x-ray,yuri,zombie,sole male,males only,yaoi," +
"tomgirl,tall man,oni,shotacon,prostate massage,policeman,huge penis,fox boy,feminization,dog boy,dickgirl on male,big penis"
/**
 * Parses the hard-coded [tags] CSV into a set of [MangaTag], trimming entries,
 * skipping blanks, and title-casing the display name. The raw entry is kept as the key.
 */
private fun mapTags(): Set<MangaTag> = tags.split(",")
    .asSequence()
    .map { it.trim() }
    .filter { it.isNotEmpty() }
    .mapTo(ArraySet()) { name ->
        MangaTag(
            title = name.toTitleCase(Locale.ENGLISH),
            key = name,
            source = source,
        )
    }
/**
 * OkHttp interceptor with two duties:
 * 1) detect the plain-text temporary IP-ban page and convert it into a
 *    [TooManyRequestExceptions] carrying the parsed retry-after delay;
 * 2) crop previews: preview URLs carry a sprite crop rect in their URL fragment
 *    (written by extractPreview), applied here by redrawing the bitmap.
 */
override fun intercept(chain: Interceptor.Chain): Response {
val response = chain.proceed(chain.request())
// Ban pages are short text responses; only peek small bodies to avoid buffering images.
if (response.headersContentLength(BANNED_RESPONSE_LENGTH) <= BANNED_RESPONSE_LENGTH) {
val text = response.peekBody(BANNED_RESPONSE_LENGTH).use { it.string() }
if (text.contains("IP address has been temporarily banned", ignoreCase = true)) {
// The message spells out the remaining time as "<h> hours, <m> minutes and <s> seconds";
// any missing component defaults to 0.
val hours = Regex("([0-9]+) hours?").find(text)?.groupValues?.getOrNull(1)?.toLongOrNull() ?: 0
val minutes = Regex("([0-9]+) minutes?").find(text)?.groupValues?.getOrNull(1)?.toLongOrNull() ?: 0
val seconds = Regex("([0-9]+) seconds?").find(text)?.groupValues?.getOrNull(1)?.toLongOrNull() ?: 0
response.closeQuietly()
throw TooManyRequestExceptions(
url = response.request.url.toString(),
retryAfter = TimeUnit.HOURS.toMillis(hours)
+ TimeUnit.MINUTES.toMillis(minutes)
+ TimeUnit.SECONDS.toMillis(seconds),
)
}
}
val imageRect = response.request.url.fragment?.split(',')
if (imageRect != null && imageRect.size == 4) {
// rect: left,top,right,bottom (same order as written by extractPreview)
return context.redrawImageResponse(response) { bitmap ->
val srcRect = Rect(
left = imageRect[0].toInt(),
top = imageRect[1].toInt(),
right = imageRect[2].toInt(),
bottom = imageRect[3].toInt(),
)
val dstRect = Rect(0, 0, srcRect.width, srcRect.height)
val result = context.createBitmap(dstRect.width, dstRect.height)
result.drawBitmap(bitmap, srcRect, dstRect)
result
}
}
return response
}
// Converts a locale to the site's language term, e.g. French -> "french".
// (The original `when` had only an `else` branch, so a plain expression is equivalent.)
private fun Locale.toLanguagePath(): String = getDisplayLanguage(Locale.ENGLISH).lowercase()
/**
 * Resolves the logged-in user's name from the forum header. Throws
 * [AuthRequiredException] when the guest header is shown instead; any other
 * unexpected layout is treated as a parsing failure.
 */
override suspend fun getUsername(): String {
    val body = webClient.httpGet("https://forums.$DOMAIN_UNAUTHORIZED/").parseHtml().body()
    body.getElementById("userlinks")
        ?.getElementsByAttributeValueContaining("href", "showuser=")
        ?.firstOrNull()
        ?.ownText()
        ?.let { return it }
    if (body.getElementById("userlinksguest") != null) {
        throw AuthRequiredException(source)
    }
    body.parseFailed()
}
// Registers this source's configurable options (custom user agent and the
// "show suspicious content" switch) in addition to the defaults.
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
keys.add(suspiciousContentKey)
}
// "Related" items are approximated by searching the first result page for the seed's title.
override suspend fun getRelatedManga(seed: Manga): List<Manga> = getListPage(
    page = 0,
    order = defaultSortOrder,
    filter = MangaListFilter(query = seed.title),
)
// True when every auth cookie (see authCookies) is present for the given domain.
private fun isAuthorized(domain: String): Boolean {
    val cookieNames = context.cookieJar.getCookies(domain).mapToSet { it.name }
    return authCookies.all(cookieNames::contains)
}
// Decodes the star rating from the sprite-based rating widget: the element's inline
// style contains two pixel offsets positioning the background image.
private fun Element.parseRating(): Float {
return runCatching {
val style = requireNotNull(attr("style"))
val (v1, v2) = ratingPattern.findAll(style).toList()
// Strip the "px" suffix and parse; the first offset encodes the star count.
var p1 = v1.groupValues.first().dropLast(2).toInt()
val p2 = v2.groupValues.first().dropLast(2).toInt()
// NOTE(review): the second offset seems to select the sprite row (−1px marking the
// half-star variant); the 8px correction shifts by half a star — confirm vs site CSS.
if (p2 != -1) {
p1 += 8
}
// Offsets run 0..80px, where 0 means 5 stars; normalize into 0..1.
(80 - p1) / 80f
}.getOrDefault(RATING_UNKNOWN)
}
// Normalizes a gallery title: drops "[...]"/"(C95)" markers, then tidies whitespace.
private fun String.cleanupTitle(): String {
    val withoutMarkers = replace(titleCleanupPattern, "")
    return withoutMarkers.replace(spacesCleanupPattern, "")
}
// Collects tags from descendants whose id starts with "ta_<prefix>" or whose title
// attribute starts with one of the known prefixes (TAG_PREFIXES is declared elsewhere
// in this file). Duplicates are collapsed by the set.
private fun Element.parseTags(): Set<MangaTag> {
// A tag's visible text doubles as both display title (title-cased) and key.
fun Element.parseTag() = textOrNull()?.let {
MangaTag(title = it.toTitleCase(Locale.ENGLISH), key = it, source = source)
}
val result = ArraySet<MangaTag>()
for (prefix in TAG_PREFIXES) {
getElementsByAttributeValueStarting("id", "ta_$prefix").mapNotNullTo(result, Element::parseTag)
getElementsByAttributeValueStarting("title", prefix).mapNotNullTo(result, Element::parseTag)
}
return result
}
/**
 * Builds a preview URL from a CSS-sprite background: the sprite sheet URL plus the
 * crop rect encoded in the fragment as "left,top,right,bottom". The rect is decoded
 * and applied by [intercept] when the image is downloaded.
 */
private fun Element.extractPreview(): String? {
    val background = backgroundOrNull() ?: return null
    val rect = listOf(background.left, background.top, background.right, background.bottom)
        .joinToString(separator = ",")
    return "${background.url}#$rect"
}
/**
 * Extracts the pagination cursor from the "next page" link (#unext), whose URL
 * carries the timestamp in its "next" query parameter; defaults to 1 when absent.
 */
private fun getNextTimestamp(root: Element): Long {
    val nextParam = root.getElementById("unext")
        ?.attrAsAbsoluteUrlOrNull("href")
        ?.toHttpUrlOrNull()
        ?.queryParameter("next")
    return nextParam?.toLongOrNull() ?: 1
}
/**
 * Builds the site's search query from a unified filter: the free-text query, quoted
 * exact-match terms for included/excluded tags, the language, and the author (as an
 * "artist:" term). Numeric tag keys are category ids and are skipped here.
 * Returns `null` for an empty filter or when nothing was produced.
 */
private fun MangaListFilter.toSearchQuery(): String? {
    if (isEmpty()) {
        return null
    }
    val joiner = StringUtil.StringJoiner(" ")
    // Appends one quoted exact-match term, e.g. tag:"key"$
    fun appendTerm(prefix: String, value: String) {
        joiner.add(prefix)
        joiner.append(value)
        joiner.append("\"$")
    }
    query?.takeIf { it.isNotEmpty() }?.let { joiner.add(it) }
    tags.forEach { tag ->
        if (!tag.key.isNumeric()) {
            appendTerm("tag:\"", tag.key)
        }
    }
    tagsExclude.forEach { tag ->
        if (!tag.key.isNumeric()) {
            appendTerm("-tag:\"", tag.key)
        }
    }
    locale?.let { appendTerm("language:\"", it.toLanguagePath()) }
    author?.takeIf { it.isNotEmpty() }?.let { appendTerm("artist:\"", it) }
    return joiner.complete().nullIfEmpty()
}
/**
 * Maps selected content types to the site's category bitmask. Unsupported types map
 * to 449 (= 1 or 64 or 128 or 256, the remaining category bits). The caller inverts
 * the mask against 1023 because the site expects *excluded* categories.
 */
private fun Collection<ContentType>.toFCats(): Int {
    var mask = 0
    for (type in this) {
        mask = mask or when (type) {
            ContentType.DOUJINSHI -> 2
            ContentType.MANGA -> 4
            ContentType.ARTIST_CG -> 8
            ContentType.GAME_CG -> 16
            ContentType.COMICS -> 512
            ContentType.IMAGE_SET -> 32
            else -> 449 // 1 or 64 or 128 or 256
        }
    }
    return mask
}
/**
 * Checks whether the user is logged in on the public domain and, if so, mirrors the
 * auth cookies onto the members-only domain (plus its required "yay=louder" cookie).
 * Returns true when authorized.
 */
private fun checkAuth(): Boolean {
    if (!isAuthorized(DOMAIN_UNAUTHORIZED)) {
        return false
    }
    if (!isAuthorized(DOMAIN_AUTHORIZED)) {
        context.cookieJar.copyCookies(
            DOMAIN_UNAUTHORIZED,
            DOMAIN_AUTHORIZED,
            authCookies,
        )
        context.cookieJar.insertCookies(DOMAIN_AUTHORIZED, "yay=louder")
    }
    return true
}
}

@ -11,68 +11,89 @@ import org.json.JSONArray
import org.json.JSONObject
import org.jsoup.Jsoup
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaParser
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.AbstractMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import org.koitharu.kotatsu.parsers.util.json.getStringOrNull
import org.koitharu.kotatsu.parsers.util.json.mapJSON
import org.koitharu.kotatsu.parsers.util.suspendlazy.suspendLazy
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.security.MessageDigest
import java.text.SimpleDateFormat
import java.util.*
import java.util.EnumSet
import java.util.LinkedList
import java.util.Locale
import kotlin.math.min
@OptIn(ExperimentalUnsignedTypes::class)
@MangaSourceParser("HITOMILA", "Hitomi.La", type = ContentType.HENTAI)
class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSource.HITOMILA) {
internal class HitomiLaParser(context: MangaLoaderContext) : AbstractMangaParser(context, MangaParserSource.HITOMILA) {
override val configKeyDomain = ConfigKey.Domain("hitomi.la")
private val ltnBaseUrl get() = "https://${getDomain("ltn")}"
private val cdnDomain = "gold-usergeneratedcontent.net"
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
private val ltnBaseUrl get() = "https://ltn.$cdnDomain"
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.NEWEST,
SortOrder.POPULARITY,
SortOrder.POPULARITY_TODAY,
SortOrder.POPULARITY_WEEK,
SortOrder.POPULARITY_MONTH,
SortOrder.POPULARITY_YEAR,
)
private val localeMap: Map<Locale, String> = mapOf(
Locale("id") to "indonesian",
Locale("jv") to "javanese",
Locale("ca") to "catalan",
Locale("ceb") to "cebuano",
Locale("cs") to "czech",
Locale("da") to "danish",
Locale("de") to "german",
Locale("et") to "estonian",
Locale.forLanguageTag("id") to "indonesian",
Locale.forLanguageTag("jv") to "javanese",
Locale.forLanguageTag("ca") to "catalan",
Locale.forLanguageTag("ceb") to "cebuano",
Locale.forLanguageTag("cs") to "czech",
Locale.forLanguageTag("da") to "danish",
Locale.forLanguageTag("de") to "german",
Locale.forLanguageTag("et") to "estonian",
Locale.ENGLISH to "english",
Locale("es") to "spanish",
Locale("eo") to "esperanto",
Locale("fr") to "french",
Locale("it") to "italian",
Locale("hi") to "hindi",
Locale("hu") to "hungarian",
Locale("pl") to "polish",
Locale("pt") to "portuguese",
Locale("vi") to "vietnamese",
Locale("tr") to "turkish",
Locale("ru") to "russian",
Locale("uk") to "ukrainian",
Locale("ar") to "arabic",
Locale.forLanguageTag("es") to "spanish",
Locale.forLanguageTag("eo") to "esperanto",
Locale.forLanguageTag("fr") to "french",
Locale.forLanguageTag("it") to "italian",
Locale.forLanguageTag("hi") to "hindi",
Locale.forLanguageTag("hu") to "hungarian",
Locale.forLanguageTag("pl") to "polish",
Locale.forLanguageTag("pt") to "portuguese",
Locale.forLanguageTag("vi") to "vietnamese",
Locale.forLanguageTag("tr") to "turkish",
Locale.forLanguageTag("ru") to "russian",
Locale.forLanguageTag("uk") to "ukrainian",
Locale.forLanguageTag("ar") to "arabic",
Locale.KOREAN to "korean",
Locale.CHINESE to "chinese",
Locale.JAPANESE to "japanese",
)
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isSearchSupported = true,
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
availableLocales = localeMap.keys,
)
private fun Locale?.getSiteLang(): String = when (this) {
null -> "all"
else -> localeMap[this] ?: "all"
}
override suspend fun getAvailableLocales(): Set<Locale> = localeMap.keys
override suspend fun getAvailableTags(): Set<MangaTag> = coroutineScope {
private suspend fun fetchAvailableTags(): Set<MangaTag> = coroutineScope {
('a'..'z').map { alphabet ->
async {
val doc = webClient.httpGet("https://$domain/alltags-$alphabet.html").parseHtml()
@ -104,14 +125,12 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
private var cachedSearchIds: List<Int> = emptyList()
override suspend fun getList(
offset: Int,
filter: MangaListFilter?,
): List<Manga> = when (filter) {
is MangaListFilter.Advanced -> {
override suspend fun getList(offset: Int, order: SortOrder, filter: MangaListFilter): List<Manga> = when {
filter.query.isNullOrEmpty() -> {
if (filter.tags.isEmpty()) {
when (filter.sortOrder) {
SortOrder.POPULARITY -> {
when (order) {
SortOrder.POPULARITY_TODAY -> {
getGalleryIDsFromNozomi(
"popular",
"today",
@ -120,6 +139,33 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
)
}
SortOrder.POPULARITY_WEEK -> {
getGalleryIDsFromNozomi(
"popular",
"week",
filter.locale.getSiteLang(),
offset.nextOffsetRange(),
)
}
SortOrder.POPULARITY_MONTH -> {
getGalleryIDsFromNozomi(
"popular",
"month",
filter.locale.getSiteLang(),
offset.nextOffsetRange(),
)
}
SortOrder.POPULARITY_YEAR -> {
getGalleryIDsFromNozomi(
"popular",
"year",
filter.locale.getSiteLang(),
offset.nextOffsetRange(),
)
}
else -> {
getGalleryIDsFromNozomi(null, "index", filter.locale.getSiteLang(), offset.nextOffsetRange())
}
@ -129,7 +175,7 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
cachedSearchIds =
hitomiSearch(
filter.tags.joinToString(" ") { it.key },
filter.sortOrder == SortOrder.POPULARITY,
order,
filter.locale.getSiteLang(),
).toList()
}
@ -137,14 +183,12 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
}
}
is MangaListFilter.Search -> {
else -> {
if (offset == 0) {
cachedSearchIds = hitomiSearch(filter.query, filter.sortOrder == SortOrder.POPULARITY).toList()
cachedSearchIds = hitomiSearch(filter.query, order).toList()
}
cachedSearchIds.subList(offset, min(offset + 25, cachedSearchIds.size))
}
else -> getGalleryIDsFromNozomi(null, "popular", "all", offset.nextOffsetRange())
}.toMangaList()
private fun Int.nextOffsetRange(): LongRange {
@ -154,7 +198,7 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
private suspend fun hitomiSearch(
query: String,
sortByPopularity: Boolean = false,
sortByPopularity: SortOrder = SortOrder.UPDATED,
language: String = "all",
): Set<Int> =
coroutineScope {
@ -162,7 +206,7 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
.trim()
.replace(Regex("""^\?"""), "")
.lowercase()
.split(Regex("\\s+"))
.splitByWhitespace()
.map {
it.replace('_', ' ')
}
@ -195,7 +239,11 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
}
val results = when {
sortByPopularity -> getGalleryIDsFromNozomi(null, "popular", language)
sortByPopularity == SortOrder.UPDATED -> getGalleryIDsFromNozomi(null, "index", language)
sortByPopularity == SortOrder.POPULARITY_TODAY -> getGalleryIDsFromNozomi("popular", "today", language)
sortByPopularity == SortOrder.POPULARITY_WEEK -> getGalleryIDsFromNozomi("popular", "week", language)
sortByPopularity == SortOrder.POPULARITY_MONTH -> getGalleryIDsFromNozomi("popular", "month", language)
sortByPopularity == SortOrder.POPULARITY_YEAR -> getGalleryIDsFromNozomi("popular", "year", language)
positiveTerms.isEmpty() -> getGalleryIDsFromNozomi(null, "index", language)
else -> emptySet()
}.toMutableSet()
@ -378,7 +426,7 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
return nozomi
}
private val galleriesIndexVersion = SuspendLazy {
private val galleriesIndexVersion = suspendLazy {
webClient.httpGet("$ltnBaseUrl/galleriesindex/version?_=${System.currentTimeMillis()}").parseRaw()
}
@ -401,9 +449,7 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
for (i in 0.until(numberOfKeys)) {
val keySize = buffer.int
if (keySize == 0 || keySize > 32) {
throw Exception("fatal: !keySize || keySize > 32")
}
check(keySize in 1..32) { "Invalid key size $keySize" }
keys.add(uData.sliceArray(buffer.position().until(buffer.position() + keySize)))
buffer.position(buffer.position() + keySize)
@ -473,19 +519,18 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
title = doc.selectFirstOrThrow("h1").text(),
url = id.toString(),
coverUrl =
"https:" +
doc.selectFirstOrThrow("picture > source")
.attr("data-srcset")
.substringBefore(" "),
"https:" +
doc.selectFirstOrThrow("picture > img")
.attr("data-src"),
publicUrl =
doc.selectFirstOrThrow("h1 > a")
.attrAsRelativeUrl("href")
.toAbsoluteUrl(domain),
author = null,
doc.selectFirstOrThrow("h1 > a")
.attrAsRelativeUrl("href")
.toAbsoluteUrl(domain),
authors = emptySet(),
tags = emptySet(),
isNsfw = true,
contentRating = ContentRating.ADULT,
rating = RATING_UNKNOWN,
altTitle = null,
altTitles = emptySet(),
state = null,
source = source,
)
@ -499,51 +544,53 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
.parseRaw()
.substringAfter("var galleryinfo = ")
.let(::JSONObject)
val author =
json.optJSONArray("artists")
?.mapJSON { it.getString("artist").toCamelCase() }
?.joinToString()
return manga.copy(
title = json.getString("title"),
largeCoverUrl =
json.getJSONArray("files").getJSONObject(0).let {
val hash = it.getString("hash")
val commonId = commonImageId()
val imageId = imageIdFromHash(hash)
val subDomain = 'a' + subdomainOffset(imageId)
"https://${getDomain("${subDomain}a")}/webp/$commonId$imageId/$hash.webp"
},
author =
json.optJSONArray("artists")
?.mapJSON { it.getString("artist").toCamelCase() }
?.joinToString(),
json.getJSONArray("files").getJSONObject(0).let {
val hash = it.getString("hash")
val imageId = imageIdFromHash(hash)
val subDomain = 'a' + subdomainOffset(imageId)
"https://${subDomain}tn.$cdnDomain/webpbigtn/${thumbPathFromHash(hash)}/$hash.webp"
},
authors = setOfNotNull(author),
publicUrl = json.getString("galleryurl").toAbsoluteUrl(domain),
tags =
buildSet {
json.optJSONArray("characters")
?.mapToTags("character")
?.let(::addAll)
json.optJSONArray("tags")
?.mapToTags("tag")
?.let(::addAll)
json.optJSONArray("artists")
?.mapToTags("artist")
?.let(::addAll)
json.optJSONArray("parodys")
?.mapToTags("parody")
?.let(::addAll)
json.optJSONArray("groups")
?.mapToTags("group")
?.let(::addAll)
},
buildSet
{
json.optJSONArray("characters")
?.mapToTags("character")
?.let(::addAll)
json.optJSONArray("tags")
?.mapToTags("tag")
?.let(::addAll)
json.optJSONArray("artists")
?.mapToTags("artist")
?.let(::addAll)
json.optJSONArray("parodys")
?.mapToTags("parody")
?.let(::addAll)
json.optJSONArray("groups")
?.mapToTags("group")
?.let(::addAll)
},
chapters = listOf(
MangaChapter(
id = generateUid(manga.url),
url = manga.url,
name = json.getString("title"),
title = json.getStringOrNull("title"),
scanlator = json.getString("type").toTitleCase(),
number = 1,
number = 1f,
volume = 0,
branch = json.getString("language_localname"),
source = source,
uploadDate = dateFormat.tryParse(json.getString("date").substringBeforeLast("-")),
uploadDate = dateFormat.parseSafe(json.getString("date").substringBeforeLast("-")),
),
),
)
@ -556,15 +603,15 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
mapJSON {
MangaTag(
title =
it.getString(key).toCamelCase().let { title ->
if (it.getStringOrNull("female")?.toIntOrNull() == 1) {
"$title"
} else if (it.getStringOrNull("male")?.toIntOrNull() == 1) {
"$title"
} else {
title
}
},
it.getString(key).toCamelCase().let { title ->
if (it.getStringOrNull("female")?.toIntOrNull() == 1) {
"$title"
} else if (it.getStringOrNull("male")?.toIntOrNull() == 1) {
"$title"
} else {
title
}
},
key = it.getString("url").tagUrlToTag(),
source = source,
).let(tags::add)
@ -610,27 +657,27 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
val hash = image.getString("hash")
val commonId = commonImageId()
val imageId = imageIdFromHash(hash)
val subDomain = 'a' + subdomainOffset(imageId)
val subDomain = subdomainOffset(imageId) + 1
val thumbSubdomain = 'a' + subdomainOffset(imageId)
MangaPage(
id = generateUid(hash),
url = "https://${getDomain("${subDomain}a")}/webp/$commonId$imageId/$hash.webp",
preview = "https://${getDomain("${subDomain}tn")}/webpsmalltn/${thumbPathFromHash(hash)}/$hash.webp",
url = "https://a${subDomain}.$cdnDomain/$commonId$imageId/$hash.avif",
preview = "https://${thumbSubdomain}tn.$cdnDomain/webpsmallsmalltn/${thumbPathFromHash(hash)}/$hash.webp",
source = source,
)
}
}
// / --->
// / --->
private var scriptLastRetrieval: Long? = null
private var scriptLastRetrieval: Long = -1L
private val mutex = Mutex()
private var subdomainOffsetDefault = 0
private val subdomainOffsetMap = mutableMapOf<Int, Int>()
private var commonImageId = ""
private suspend fun refreshScript() = mutex.withLock {
if (scriptLastRetrieval == null || (scriptLastRetrieval!! + 60000) < System.currentTimeMillis()) {
if (scriptLastRetrieval == -1L || (scriptLastRetrieval + 60000) < System.currentTimeMillis()) {
val ggScript = webClient.httpGet("$ltnBaseUrl/gg.js?_=${System.currentTimeMillis()}").parseRaw()
subdomainOffsetDefault = Regex("var o = (\\d)").find(ggScript)!!.groupValues[1].toInt()
@ -671,31 +718,51 @@ class HitomiLaParser(context: MangaLoaderContext) : MangaParser(context, MangaSo
return hash.replace(Regex("""^.*(..)(.)$"""), "$2/$1")
}
private suspend fun subdomainFromURL(url: String, base: String? = null): String {
var retval = "b"
// rewrite_tn_paths <-- common.js
private suspend fun rewriteTnPaths(html: String): String {
val thumbUrlRegex = Regex(
"""(?<protocol>//)(?<host>[a-z0-9.-]+\.(?:hitomi\.la|${Regex.escape(cdnDomain)}))/(?<pathAfterHost>(?:avif|webp)?(?:small)?(?:big|small|medium)tn/[0-9a-f]/[0-9a-f]{2}/[0-9a-f]{64}\.(?:webp|avif|gif|png|jpe?g))""",
)
if (!base.isNullOrBlank())
retval = base
var resultHtml = html
thumbUrlRegex.findAll(html).forEach { matchResult ->
val originalUrl = matchResult.value
val groups = matchResult.groups
val regex = Regex("""/[0-9a-f]{61}([0-9a-f]{2})([0-9a-f])""")
val hashMatch = regex.find(url) ?: return "a"
val imageId = hashMatch.groupValues.let { it[2] + it[1] }.toIntOrNull(16)
val pathAfterHost = groups["pathAfterHost"]?.value ?: return@forEach
val newTnSubdomain = subdomainFromURL(originalUrl, "tn")
val correctedUrl = "${groups["protocol"]!!.value}$newTnSubdomain.$cdnDomain/$pathAfterHost"
if (imageId != null) {
retval = ('a' + subdomainOffset(imageId)).toString() + retval
if (originalUrl != correctedUrl) {
resultHtml = resultHtml.replace(originalUrl, correctedUrl)
}
}
return retval
return resultHtml
}
// rewrite_tn_paths <-- common.js
private suspend fun rewriteTnPaths(html: String): String {
val tnRegex = Regex("""//tn\.hitomi\.la/[^/]+/[0-9a-f]/[0-9a-f]{2}/[0-9a-f]{64}""")
val url = tnRegex.find(html)?.value ?: return html
val newSubdomain = subdomainFromURL(url, "tn")
val newUrl = url.replace(Regex("""//..?\.hitomi\.la/"""), "//${getDomain(newSubdomain)}/")
private suspend fun subdomainFromURL(url: String, base: String?): String {
val resultSubdomain = base ?: "b"
// This regex extracts the last 3 hex characters from the hash in the URL
// The hash is 64 characters, so we look for the 61st character onward
val hashRegex = Regex("""/([0-9a-f]{61}[0-9a-f]{3})[./]""")
val fullHashMatch = hashRegex.find(url)
?: // If no hash is found, default to "a" + base (typically "atn")
return "a$resultSubdomain"
val fullHash = fullHashMatch.groupValues[1]
val lastThreeChars = fullHash.takeLast(3)
val lastDigit = lastThreeChars.last()
val lastTwoDigits = lastThreeChars.take(2)
return html.replace(tnRegex, newUrl)
val imageId = "$lastDigit$lastTwoDigits".toIntOrNull(16)
return if (imageId != null) {
('a' + subdomainOffset(imageId)).toString() + resultSubdomain
} else {
"a$resultSubdomain"
}
}
private fun String.toTagTitle(): String {

@ -0,0 +1,140 @@
package org.koitharu.kotatsu.parsers.site.all
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.generateUid
import org.koitharu.kotatsu.parsers.util.parseHtml
import org.koitharu.kotatsu.parsers.util.selectFirstOrThrow
import org.koitharu.kotatsu.parsers.util.parseSafe
import java.text.SimpleDateFormat
import java.util.Locale
@MangaSourceParser("HOLOEARTH", "HoloEarth")
internal class HoloEarthParser(context: MangaLoaderContext) :
    PagedMangaParser(context, MangaParserSource.HOLOEARTH, 3) {

    override val configKeyDomain: ConfigKey.Domain
        get() = ConfigKey.Domain("holoearth.com")

    override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
        super.onCreateConfig(keys)
        keys.add(userAgentKey)
    }

    override val availableSortOrders: Set<SortOrder> = setOf(SortOrder.NEWEST)

    override val filterCapabilities: MangaListFilterCapabilities
        get() = MangaListFilterCapabilities(
            isSearchSupported = false,
        )

    override suspend fun getFilterOptions() = MangaListFilterOptions(
        availableLocales = setOf(
            Locale("en"),
            Locale.JAPANESE,
            Locale("id"),
        ),
    )

    /**
     * Lists the manga from the Holonometria manga index page.
     * The locale selects a URL path prefix; Japanese is served from the site root.
     * NOTE(review): the site exposes a single unpaginated list and `page` is not used —
     * the paged base class stops when results repeat; confirm this is intended.
     */
    override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
        val url = buildString {
            append("https://$domain")
            filter.locale?.let {
                append(
                    when (it) {
                        Locale("en") -> "/en"
                        Locale.JAPANESE -> ""
                        Locale("id") -> "/id"
                        else -> "" // default: Japanese (site root)
                    },
                )
            }
            append("/alt/holonometria/manga")
        }
        val doc = webClient.httpGet(url).parseHtml()
        val root = doc.body().selectFirstOrThrow(".manga__list")
        // map (not mapNotNull): every list item yields a Manga; an empty list maps to empty.
        return root.select("li .manga__item-inner").map { li ->
            val coverUrl = li.getElementsByTag("img").attr("src")
            val title = li.getElementsByClass("manga__title").text()
            val altTitle = li.getElementsByClass("manga__copy").text()
            val description = li.getElementsByClass("manga__caption").text()
            val mangaUrl = li.getElementsByTag("a").attr("href")
            Manga(
                id = generateUid(mangaUrl),
                title = title,
                altTitles = setOf(altTitle),
                url = mangaUrl,
                publicUrl = mangaUrl,
                rating = RATING_UNKNOWN,
                contentRating = null,
                coverUrl = coverUrl,
                tags = emptySet(),
                state = null,
                authors = emptySet(),
                source = source,
                description = description,
            )
        }
    }

    /**
     * Fetches the detail page: a large cover plus the chapter list with dates in
     * "yyyy.MM.dd" format. Chapters are numbered by their position on the page.
     */
    override suspend fun getDetails(manga: Manga): Manga {
        val doc = webClient.httpGet(manga.url).parseHtml()
        val dateFormat = SimpleDateFormat("yyyy.MM.dd", Locale.US)
        val root = doc.body().selectFirstOrThrow(".manga-detail__wrapper")
        val coverUrl = root.selectFirstOrThrow(".manga-detail__thumb img").attr("src")
        // The author/scanlator credit appears once per page, not per chapter:
        // resolve it outside the loop instead of re-querying on every iteration.
        val scanlator = root.selectFirst(".manga-detail__person")?.text()
        val mangaChapters = root.select(".manga-detail__list-item").mapIndexed { index, li ->
            val url = li.selectFirstOrThrow(".manga-detail__list-link").attr("href")
            val title = li.selectFirstOrThrow(".manga-detail__list-title").text()
            val dateStr = li.selectFirstOrThrow(".manga-detail__list-date").text()
            MangaChapter(
                id = generateUid(url),
                title = title,
                number = index + 1f,
                volume = 0,
                url = url,
                scanlator = scanlator,
                uploadDate = dateFormat.parseSafe(dateStr) ?: 0L,
                branch = null,
                source = source,
            )
        }
        return manga.copy(
            coverUrl = coverUrl,
            chapters = mangaChapters,
        )
    }

    /**
     * Reads the page images from the chapter's swiper carousel. Slides are stored in
     * reverse reading order on the page, hence the `reversed()`. Slides without an
     * <img> element (e.g. decorative slides) are skipped.
     */
    override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
        val doc = webClient.httpGet(chapter.url).parseHtml()
        val imageList = doc.body().selectFirstOrThrow(".manga-detail__swiper-wrapper")
        val images = imageList.select(".manga-detail__swiper-slide").reversed()
        return images.mapNotNull { page ->
            val img = page.selectFirst("img") ?: return@mapNotNull null
            val src = img.attr("src")
            MangaPage(
                id = generateUid(src),
                url = src,
                preview = src,
                source = source,
            )
        }
    }
}

@ -7,39 +7,93 @@ import kotlinx.coroutines.coroutineScope
import org.jsoup.nodes.Element
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import java.util.*
@MangaSourceParser("IMHENTAI", "ImHentai", type = ContentType.HENTAI)
internal class ImHentai(context: MangaLoaderContext) :
PagedMangaParser(context, MangaSource.IMHENTAI, pageSize = 20) {
PagedMangaParser(context, MangaParserSource.IMHENTAI, pageSize = 20) {
override val availableSortOrders: Set<SortOrder> =
EnumSet.of(SortOrder.UPDATED, SortOrder.POPULARITY, SortOrder.RATING)
override val configKeyDomain = ConfigKey.Domain("imhentai.xxx")
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isSearchSupported = true,
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
availableLocales = setOf(
Locale.ENGLISH,
Locale.JAPANESE,
Locale("es"),
Locale.FRENCH,
Locale("kr"),
Locale.GERMAN,
Locale("ru"),
),
availableContentTypes = EnumSet.of(
ContentType.MANGA,
ContentType.DOUJINSHI,
ContentType.COMICS,
ContentType.IMAGE_SET,
ContentType.ARTIST_CG,
ContentType.GAME_CG,
),
)
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override suspend fun getListPage(
page: Int,
order: SortOrder,
filter: MangaListFilter,
): List<Manga> {
val url = buildString {
append("https://")
append(domain)
append("/search/?page=")
append(page.toString())
when (filter) {
is MangaListFilter.Search -> {
when {
!filter.query.isNullOrEmpty() -> {
append("&key=")
append(filter.query.urlEncoded())
}
is MangaListFilter.Advanced -> {
else -> {
if (filter.tags.isNotEmpty()) {
append("&key=")
append(filter.tags.joinToString(separator = ",") { it.key })
filter.tags.joinTo(this, separator = ",") { it.key }
}
var types = "&m=1&d=1&w=1&i=1&a=1&g=1"
if (filter.types.isNotEmpty()) {
types = "&m=0&d=0&w=0&i=0&a=0&g=0"
filter.types.forEach {
when (it) {
ContentType.MANGA -> types = types.replace("&m=0", "&m=1")
ContentType.DOUJINSHI -> types = types.replace("&d=0", "&d=1")
ContentType.COMICS -> types = types.replace("&w=0", "&w=1")
ContentType.IMAGE_SET -> types = types.replace("&i=0", "&i=1")
ContentType.ARTIST_CG -> types = types.replace("&a=0", "&a=1")
ContentType.GAME_CG -> types = types.replace("&g=0", "&g=1")
else -> {}
}
}
}
append(types)
var lang = "&en=1&jp=1&es=1&fr=1&kr=1&de=1&ru=1"
filter.locale?.let {
@ -48,17 +102,13 @@ internal class ImHentai(context: MangaLoaderContext) :
}
append(lang)
when (filter.sortOrder) {
when (order) {
SortOrder.UPDATED -> append("&lt=1&pp=0")
SortOrder.POPULARITY -> append("&lt=0&pp=1")
SortOrder.RATING -> append("&lt=0&pp=0")
else -> append("&lt=1&pp=0")
}
}
null -> {
append("&lt=1&pp=0")
}
}
}
@ -70,30 +120,30 @@ internal class ImHentai(context: MangaLoaderContext) :
id = generateUid(href),
url = href,
publicUrl = href.toAbsoluteUrl(domain),
coverUrl = a.selectFirst("img")?.src().orEmpty(),
coverUrl = a.selectFirst("img")?.src(),
title = div.selectFirst(".caption")?.text().orEmpty(),
altTitle = null,
altTitles = emptySet(),
rating = RATING_UNKNOWN,
tags = emptySet(),
author = null,
authors = emptySet(),
state = null,
source = source,
isNsfw = isNsfwSource,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
)
}
}
//Tags are deliberately reduced because there are too many and this slows down the application.
//only the most popular ones are taken.
override suspend fun getAvailableTags(): Set<MangaTag> {
private suspend fun fetchAvailableTags(): Set<MangaTag> {
return coroutineScope {
(1..3).map { page ->
async { getTags(page) }
async { fetchTagsPage(page) }
}
}.awaitAll().flattenTo(ArraySet(360))
}
private suspend fun getTags(page: Int): Set<MangaTag> {
private suspend fun fetchTagsPage(page: Int): Set<MangaTag> {
val url = "https://$domain/tags/popular/?page=$page"
val root = webClient.httpGet(url).parseHtml()
return root.parseTags()
@ -108,33 +158,30 @@ internal class ImHentai(context: MangaLoaderContext) :
)
}
override suspend fun getAvailableLocales(): Set<Locale> = setOf(
Locale.ENGLISH, Locale.JAPANESE, Locale("es"), Locale.FRENCH, Locale("kr"), Locale.GERMAN, Locale("ru"),
)
override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
val fullUrl = manga.url.toAbsoluteUrl(domain)
val doc = webClient.httpGet(fullUrl).parseHtml()
val author = doc.selectFirst("li:contains(Artists) a.tag")?.ownTextOrNull()
manga.copy(
tags = doc.body().select("li:contains(Tags) a.tag").mapNotNullToSet {
val href = it.attr("href").substringAfterLast("tag/").substringBeforeLast('/')
val name = it.html().substringBeforeLast("<span")
MangaTag(
key = href,
title = name,
title = it.ownText().toTitleCase(sourceLocale),
source = source,
)
},
author = doc.selectFirst("li:contains(Artists) a.tag")?.html()?.substringBefore("<span"),
authors = setOfNotNull(author),
chapters = listOf(
MangaChapter(
id = manga.id,
name = manga.title,
number = 1,
title = null,
number = 1f,
volume = 0,
url = manga.url,
scanlator = null,
uploadDate = 0,
branch = doc.selectFirst("li:contains(Language) a.tag")?.html()?.substringBeforeLast("<span"),
branch = doc.selectFirst("li:contains(Language) a.tag")?.ownTextOrNull()?.toTitleCase(sourceLocale),
source = source,
),
),
@ -151,15 +198,15 @@ internal class ImHentai(context: MangaLoaderContext) :
id = generateUid(href),
url = href,
publicUrl = href.toAbsoluteUrl(domain),
coverUrl = a.selectFirst("img")?.src().orEmpty(),
coverUrl = a.selectFirst("img")?.src(),
title = div.selectFirst(".caption")?.text().orEmpty(),
altTitle = null,
altTitles = emptySet(),
rating = RATING_UNKNOWN,
tags = emptySet(),
author = null,
authors = emptySet(),
state = null,
source = source,
isNsfw = false,
contentRating = null,
)
}
}
@ -167,20 +214,28 @@ internal class ImHentai(context: MangaLoaderContext) :
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val fullUrl = chapter.url.toAbsoluteUrl(domain)
val doc = webClient.httpGet(fullUrl).parseHtml()
val totalPages = doc.selectFirstOrThrow(".pages").text().replace("Pages: ", "").toInt() + 1
val domainImg = doc.requireElementById("append_thumbs").selectFirstOrThrow("img").src()?.replace("1t.jpg", "")
val totalPages = doc.selectFirstOrThrow(".pages").text().replace("Pages: ", "").toInt()
val baseImg = doc.requireElementById("append_thumbs").selectFirstOrThrow("img")
val baseUrl = baseImg.selectFirstParentOrThrow("a").attrAsRelativeUrl("href").replace("/1/", "/\$/")
val baseThumbUrl = baseImg.src()?.replace("/1t.", "/\$t.")
val pages = ArrayList<MangaPage>(totalPages)
for (i in 1 until totalPages) {
val url = "$domainImg$i.jpg"
repeat(totalPages) { i ->
val url = baseUrl.replace("\$", (i + 1).toString())
pages.add(
MangaPage(
id = generateUid(url),
url = url,
preview = null,
preview = baseThumbUrl?.replace("\$", (i + 1).toString()),
source = source,
),
)
}
return pages
}
override suspend fun getPageUrl(page: MangaPage): String {
val doc = webClient.httpGet(page.url.toAbsoluteUrl(domain)).parseHtml()
val img = doc.body().requireElementById("gimg")
return img.requireSrc()
}
}

@ -0,0 +1,421 @@
package org.koitharu.kotatsu.parsers.site.all
import okhttp3.HttpUrl.Companion.toHttpUrl
import org.json.JSONObject
import org.jsoup.HttpStatusException
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.exception.ParseException
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import org.koitharu.kotatsu.parsers.util.json.getIntOrDefault
import org.koitharu.kotatsu.parsers.util.json.getLongOrDefault
import org.koitharu.kotatsu.parsers.util.json.getStringOrNull
import org.koitharu.kotatsu.parsers.util.json.mapJSONNotNullToSet
import org.koitharu.kotatsu.parsers.util.suspendlazy.getOrDefault
import org.koitharu.kotatsu.parsers.util.suspendlazy.suspendLazy
import java.net.HttpURLConnection
import java.text.SimpleDateFormat
import java.util.*
import org.koitharu.kotatsu.parsers.Broken
@Broken("Need to fix getPages, most manga don't have chapter images due to faulty fetch logic")
@MangaSourceParser("KOHARU", "Schale.network", type = ContentType.HENTAI)
internal class Koharu(context: MangaLoaderContext) :
    PagedMangaParser(context, MangaParserSource.KOHARU, 24) {

    override val configKeyDomain = ConfigKey.Domain("niyaniya.moe")

    // REST API is served from a different host than the web front-end.
    private val apiSuffix = "api.schale.network"

    override val userAgentKey = ConfigKey.UserAgent(
        "Mozilla/5.0 (Linux; Android 10; K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.7204.46 Mobile Safari/537.36",
    )

    // Lazily-built map of lowercase artist name -> tag id (tags with namespace 1),
    // used to translate an author filter into an id-based "include" query.
    private val authorsIds = suspendLazy { fetchAuthorsIds() }

    // User-selectable page-image resolution; the key doubles as the API's
    // quality segment ("0" = original/lowest bucket per the preset labels).
    private val preferredImageResolutionKey = ConfigKey.PreferredImageServer(
        presetValues = mapOf(
            "0" to "Lowest Quality",
            "780" to "Low Quality (780px)",
            "980" to "Medium Quality (980px)",
            "1280" to "High Quality (1280px)",
            "1600" to "Highest Quality (1600px)",
        ),
        defaultValue = "1280",
    )

    override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
        super.onCreateConfig(keys)
        keys.add(userAgentKey)
        keys.add(preferredImageResolutionKey)
    }

    // The API rejects requests without a matching referer/origin of the web domain.
    override fun getRequestHeaders() = super.getRequestHeaders().newBuilder()
        .add("referer", "https://$domain/")
        .add("origin", "https://$domain")
        .build()

    override val availableSortOrders: Set<SortOrder> = EnumSet.of(
        SortOrder.NEWEST,
        SortOrder.POPULARITY,
        SortOrder.POPULARITY_TODAY,
        SortOrder.POPULARITY_WEEK,
        SortOrder.ALPHABETICAL,
        SortOrder.ALPHABETICAL_DESC,
        SortOrder.RATING,
    )

    override val filterCapabilities: MangaListFilterCapabilities
        get() = MangaListFilterCapabilities(
            isMultipleTagsSupported = true,
            isSearchSupported = true,
            isAuthorSearchSupported = true,
            isSearchWithFiltersSupported = true,
            isTagsExclusionSupported = true,
        )

    // Only namespace-0 tags are exposed as list filters.
    override suspend fun getFilterOptions() = MangaListFilterOptions(
        availableTags = fetchTags(namespace = 0),
    )

    /**
     * Builds the /books search URL from [order] and [filter] and returns one page of results.
     *
     * A query of the form "id:<id/key>" short-circuits to the detail endpoint and
     * returns that single manga (the `return` inside buildString is non-local and
     * exits this function).
     */
    override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
        val baseUrl = "https://$apiSuffix/books"
        val url = buildString {
            append(baseUrl)
            val terms: MutableList<String> = mutableListOf()
            val includedTags: MutableList<String> = mutableListOf()
            val excludedTags: MutableList<String> = mutableListOf()
            if (!filter.query.isNullOrEmpty() && filter.query.startsWith("id:")) {
                val ipk = filter.query.removePrefix("id:")
                val response = webClient.httpGet("$baseUrl/detail/$ipk").parseJson()
                return listOf(parseMangaDetail(response))
            }
            // NOTE(review): POPULARITY and POPULARITY_TODAY intentionally(?) share "8",
            // and ALPHABETICAL / ALPHABETICAL_DESC both map to "2" — confirm the API
            // has no separate ascending/descending values.
            val sortValue = when (order) {
                SortOrder.POPULARITY, SortOrder.POPULARITY_TODAY -> "8"
                SortOrder.POPULARITY_WEEK -> "9"
                SortOrder.ALPHABETICAL -> "2"
                SortOrder.ALPHABETICAL_DESC -> "2"
                SortOrder.RATING -> "3"
                SortOrder.NEWEST -> "4"
                else -> "4"
            }
            append("?sort=").append(sortValue)
            if (!filter.query.isNullOrEmpty()) {
                terms.add("title:\"${filter.query.urlEncoded()}\"")
            }
            if (!filter.author.isNullOrEmpty()) {
                // Prefer an exact id match from the cached artist map; fall back to a
                // free-text artist term when the name is unknown.
                val authors = authorsIds.getOrDefault(emptyMap())
                val authorId = authors[filter.author.lowercase()]
                if (authorId != null) {
                    includedTags.add(authorId)
                } else {
                    terms.add("artist:\"${filter.author.urlEncoded()}\"")
                }
            }
            // A leading "-" on a tag key marks it as excluded.
            filter.tags.forEach { tag ->
                if (tag.key.startsWith("-")) {
                    excludedTags.add(tag.key.substring(1))
                } else {
                    includedTags.add(tag.key)
                }
            }
            if (excludedTags.isNotEmpty()) {
                append("&exclude=").append(excludedTags.joinToString(","))
                append("&e=1")
            }
            if (includedTags.isNotEmpty()) {
                append("&include=").append(includedTags.joinToString(","))
                append("&i=1")
            }
            append("&page=").append(page)
            if (terms.isNotEmpty()) {
                append("&s=").append(terms.joinToString(" ").urlEncoded())
            }
        }
        val json = webClient.httpGet(url).parseJson()
        // The API signals failures inside a 200 body via "error"/"message" fields.
        json.getStringOrNull("error")?.let {
            throw ParseException(it, url)
        }
        json.getStringOrNull("message")?.let {
            throw ParseException(it, url)
        }
        return parseMangaList(json)
    }

    /** Maps a /books list response ("entries" array) to lightweight [Manga] stubs. */
    private fun parseMangaList(json: JSONObject): List<Manga> {
        val entries = json.optJSONArray("entries") ?: return emptyList()
        val results = ArrayList<Manga>(entries.length())
        for (i in 0 until entries.length()) {
            val entry = entries.getJSONObject(i)
            val id = entry.getLong("id")
            val key = entry.getString("key")
            // Internal URL format is "<id>/<key>", reused by detail/pages endpoints.
            val url = "$id/$key"
            results.add(
                Manga(
                    id = generateUid(id),
                    url = url,
                    publicUrl = "https://$domain/g/$url",
                    title = entry.getString("title"),
                    altTitles = emptySet(),
                    authors = emptySet(),
                    tags = emptySet(),
                    rating = RATING_UNKNOWN,
                    state = null,
                    // NOTE(review): here the thumbnail "path" is used as-is, while
                    // getDetails prepends the "base" host — confirm list paths are absolute.
                    coverUrl = entry.getJSONObject("thumbnail").getString("path"),
                    contentRating = ContentRating.ADULT,
                    source = source,
                ),
            )
        }
        return results
    }

    /**
     * Maps a /books/detail response to a [Manga]; used by the "id:" search shortcut.
     * Only the namespace-1 tag (artist) is extracted here; other tags are skipped.
     */
    private fun parseMangaDetail(json: JSONObject): Manga {
        val data = json.getJSONObject("data")
        val id = data.getLong("id")
        val key = data.getString("key")
        val url = "$id/$key"
        var author: String? = null
        val tags = data.optJSONArray("tags")
        if (tags != null) {
            for (i in 0 until tags.length()) {
                val tag = tags.getJSONObject(i)
                if (tag.getInt("namespace") == 1) {
                    author = tag.getString("name")
                    break
                }
            }
        }
        return Manga(
            id = generateUid(id),
            url = url,
            publicUrl = "https://$domain/g/$url",
            title = data.getString("title"),
            altTitles = emptySet(),
            authors = setOfNotNull(author),
            tags = emptySet(),
            rating = RATING_UNKNOWN,
            state = null,
            coverUrl = data.getJSONObject("thumbnails").getJSONObject("main").getString("path"),
            contentRating = ContentRating.ADULT,
            source = source,
        )
    }

    /**
     * Fetches full details and synthesizes a single chapter (galleries are
     * one-shot; the whole gallery is modeled as chapter number 1).
     */
    override suspend fun getDetails(manga: Manga): Manga {
        val url = manga.url
        val response = webClient.httpGet("https://$apiSuffix/books/detail/$url").parseJson()
        val id = response.getLong("id")
        val key = response.getString("key")
        val mangaUrl = "$id/$key"
        val tagsList = mutableSetOf<MangaTag>()
        var author: String? = null
        val tags = response.optJSONArray("tags")
        if (tags != null) {
            for (i in 0 until tags.length()) {
                val tag = tags.getJSONObject(i)
                if (tag.has("namespace")) {
                    val namespace = tag.getInt("namespace")
                    val tagName = tag.getString("name")
                    when (namespace) {
                        // namespace 1 = artist; treated as the author.
                        1 -> {
                            author = tagName
                        }
                        // Selected namespaces are surfaced as regular tags.
                        0, 3, 8, 9, 10, 12 -> {
                            tagsList.add(
                                MangaTag(
                                    key = tagName,
                                    title = tagName.toTitleCase(sourceLocale),
                                    source = source,
                                ),
                            )
                        }
                    }
                } else {
                    // Tags without a namespace are kept as plain tags.
                    val tagName = tag.getString("name")
                    tagsList.add(
                        MangaTag(
                            key = tagName,
                            title = tagName.toTitleCase(sourceLocale),
                            source = source,
                        ),
                    )
                }
            }
        }
        val description = buildString {
            val created = response.getLongOrDefault("created_at", 0L)
            if (created > 0) {
                // NOTE(review): "\n" inside HTML-style <b> markup — a <br> may render
                // better depending on the client; confirm how descriptions are displayed.
                append("<b>Posted:</b> ").append(SimpleDateFormat("yyyy-MM-dd", Locale.US).format(created)).append("\n")
            }
            val thumbnails = response.getJSONObject("thumbnails")
            val pageCount = thumbnails.optJSONArray("entries")?.length() ?: 0
            append("<b>Pages:</b> ").append(pageCount)
        }
        val thumbnails = response.getJSONObject("thumbnails")
        val base = thumbnails.getString("base")
        val mainPath = thumbnails.getJSONObject("main").getString("path")
        val coverUrl = base + mainPath
        return Manga(
            id = generateUid(id),
            url = mangaUrl,
            publicUrl = "https://$domain/g/$mangaUrl",
            title = response.getString("title"),
            altTitles = emptySet(),
            authors = setOfNotNull(author),
            tags = tagsList,
            rating = RATING_UNKNOWN,
            state = MangaState.FINISHED,
            description = description,
            coverUrl = coverUrl,
            contentRating = ContentRating.ADULT,
            source = source,
            chapters = listOf(
                MangaChapter(
                    id = generateUid("$mangaUrl/chapter"),
                    title = null,
                    number = 1f,
                    url = mangaUrl,
                    scanlator = null,
                    uploadDate = response.getLongOrDefault("created_at", 0L),
                    branch = null,
                    source = source,
                    volume = 0,
                ),
            ),
        )
    }

    /**
     * Resolves page images: POSTs the detail endpoint with a "crt" clearance token
     * (obtained from WebView local storage), picks the best available resolution
     * bucket, then fetches the image list for that bucket.
     *
     * On HTTP 403 the token is assumed stale and a browser action is requested
     * before rethrowing. (This is the flow flagged as faulty by @Broken above.)
     */
    override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
        val mangaUrl = chapter.url
        val parts = mangaUrl.split('/')
        if (parts.size < 2) {
            throw ParseException("Invalid URL", mangaUrl)
        }
        val id = parts[0]
        val key = parts[1]
        val clearance = getClearance(chapter.publicUrl())
        val dataUrl = "https://$apiSuffix/books/detail/$id/$key?crt=$clearance"
        val data = try {
            webClient.httpPost(
                url = dataUrl.toHttpUrl(),
                form = emptyMap(),
                extraHeaders = getRequestHeaders(),
            ).parseJson().getJSONObject("data")
        } catch (e: HttpStatusException) {
            if (e.statusCode == HttpURLConnection.HTTP_FORBIDDEN) {
                // Token may be invalid or expired
                // WebView should be closed after receiving Token
                context.requestBrowserAction(this, chapter.publicUrl())
            }
            throw e
        }
        val preferredRes = config[preferredImageResolutionKey] ?: "1280"
        // Fallback order per preference; "0" is the original-quality bucket.
        val resolutionOrder = when (preferredRes) {
            "1600" -> listOf("1600", "1280", "0", "980", "780")
            "1280" -> listOf("1280", "1600", "0", "980", "780")
            "980" -> listOf("980", "1280", "0", "1600", "780")
            "780" -> listOf("780", "980", "0", "1280", "1600")
            else -> listOf("0", "1600", "1280", "980", "780")
        }
        var selectedImageId: Int? = null
        var selectedPublicKey: String? = null
        var selectedQuality = "0"
        // Take the first bucket that actually carries an id/key pair.
        for (res in resolutionOrder) {
            if (data.has(res) && !data.isNull(res)) {
                val resData = data.getJSONObject(res)
                if (resData.has("id") && resData.has("key")) {
                    selectedImageId = resData.getInt("id")
                    selectedPublicKey = resData.getString("key")
                    selectedQuality = res
                    break
                }
            }
        }
        if (selectedImageId == null || selectedPublicKey == null) {
            throw ParseException("Cant find image data", dataUrl)
        }
        val imagesResponse = webClient.httpGet(
            "https://$apiSuffix/books/data/$id/$key/$selectedImageId/$selectedPublicKey/$selectedQuality?crt=$clearance",
        ).parseJson()
        // Full image URL = "base" host + per-page "path".
        val base = imagesResponse.getString("base")
        val entries = imagesResponse.getJSONArray("entries")
        val pages = ArrayList<MangaPage>(entries.length())
        for (i in 0 until entries.length()) {
            val imagePath = entries.getJSONObject(i).getString("path")
            val fullImageUrl = "$base$imagePath"
            pages.add(
                MangaPage(
                    id = generateUid(fullImageUrl),
                    url = fullImageUrl,
                    preview = null,
                    source = source,
                ),
            )
        }
        return pages
    }

    /** Fetches all tag filters and keeps only those in the given [namespace]. */
    private suspend fun fetchTags(namespace: Int): Set<MangaTag> =
        webClient.httpGet("https://$apiSuffix/books/tags/filters").parseJsonArray().mapJSONNotNullToSet {
            if (it.getIntOrDefault("namespace", 0) != namespace) {
                null
            } else {
                MangaTag(
                    title = it.getStringOrNull("name")
                        ?.toTitleCase(sourceLocale) ?: return@mapJSONNotNullToSet null,
                    key = it.getStringOrNull("id") ?: return@mapJSONNotNullToSet null,
                    source = source,
                )
            }
        }

    // Artist tags (namespace 1) keyed by lowercase display name -> tag id.
    private suspend fun fetchAuthorsIds(): Map<String, String> = fetchTags(namespace = 1)
        .associate { it.title.lowercase() to it.key }

    // Reads the "clearance" token from WebView local storage (stored JSON-quoted,
    // hence the surrounding-quote strip); when absent/empty, asks the user to open
    // the chapter in a browser to obtain one.
    private suspend fun getClearance(chapterUrl: String): String = WebViewHelper(context)
        .getLocalStorageValue(domain, "clearance")?.removeSurrounding('"')?.nullIfEmpty()
        ?: context.requestBrowserAction(this, chapterUrl)

    // Canonical reader URL for a gallery; used as the browser-action target.
    private fun MangaChapter.publicUrl() = "https://$domain/g/$url/read/1"
}

@ -2,14 +2,13 @@ package org.koitharu.kotatsu.parsers.site.all
import kotlinx.coroutines.async
import kotlinx.coroutines.coroutineScope
import okhttp3.Headers
import okhttp3.HttpUrl
import okhttp3.HttpUrl.Companion.toHttpUrl
import org.json.JSONObject
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaParser
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.AbstractMangaParser
import org.koitharu.kotatsu.parsers.exception.NotFoundException
import org.koitharu.kotatsu.parsers.exception.ParseException
import org.koitharu.kotatsu.parsers.model.*
@ -21,10 +20,17 @@ import javax.crypto.spec.SecretKeySpec
internal abstract class LineWebtoonsParser(
context: MangaLoaderContext,
source: MangaSource,
) : MangaParser(context, source) {
source: MangaParserSource,
) : AbstractMangaParser(context, source) {
override val isMultipleTagsSupported = false
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isSearchSupported = true,
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
)
private val signer by lazy {
WebtoonsUrlSigner("gUtPzJFZch4ZyAGviiyH94P99lQ3pFdRTwpJWDlSGFfwgpr6ses5ALOxWHOIT7R1")
@ -48,10 +54,13 @@ internal abstract class LineWebtoonsParser(
SortOrder.RATING,
SortOrder.UPDATED,
)
override val headers: Headers
get() = Headers.Builder()
.add("User-Agent", "nApps (Android 12;; linewebtoon; 3.1.0)")
.build()
override val userAgentKey = ConfigKey.UserAgent("nApps (Android 12;; linewebtoon; 3.1.0)")
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override suspend fun getPageUrl(page: MangaPage): String {
return page.url.toAbsoluteUrl(staticDomain)
@ -77,7 +86,7 @@ internal abstract class LineWebtoonsParser(
val episodes = firstResult
.getJSONObject("episodeList")
.getJSONArray("episode")
.toJSONList()
.asTypedList<JSONObject>()
.toMutableList()
while (episodes.count() < totalEpisodeCount) {
@ -85,7 +94,7 @@ internal abstract class LineWebtoonsParser(
url = "/lineWebtoon/webtoon/challengeEpisodeList.json?v=2&titleNo=$titleNo&startIndex=${episodes.count()}&pageSize=30",
).getJSONObject("episodeList")
.getJSONArray("episode")
.toJSONList()
.asTypedList<JSONObject>()
episodes.addAll(page)
}
@ -93,8 +102,9 @@ internal abstract class LineWebtoonsParser(
return episodes.mapChapters { i, jo ->
MangaChapter(
id = generateUid("$titleNo-$i"),
name = jo.getString("episodeTitle"),
number = jo.getInt("episodeSeq"),
title = jo.getStringOrNull("episodeTitle"),
number = jo.getInt("episodeSeq").toFloat(),
volume = 0,
url = "$titleNo-${jo.get("episodeNo")}",
uploadDate = jo.getLong("modifyYmdt"),
branch = null,
@ -111,18 +121,20 @@ internal abstract class LineWebtoonsParser(
makeRequest("/lineWebtoon/webtoon/challengeTitleInfo.json?v=2&titleNo=${titleNo}")
.getJSONObject("titleInfo")
.let { jo ->
val isNsfwSource = jo.getBooleanOrDefault("ageGradeNotice", isNsfwSource)
val author = jo.getStringOrNull("writingAuthorName")
Manga(
id = generateUid(titleNo),
title = jo.getString("title"),
altTitle = null,
altTitles = emptySet(),
url = "$titleNo",
publicUrl = "https://$domain/$languageCode/canvas/a/list?title_no=${titleNo}",
rating = jo.getFloatOrDefault("starScoreAverage", -10f) / 10f,
isNsfw = jo.getBooleanOrDefault("ageGradeNotice", isNsfwSource),
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
largeCoverUrl = jo.getStringOrNull("thumbnailVertical")?.toAbsoluteUrl(staticDomain),
tags = setOf(parseTag(jo.getJSONObject("genreInfo"))),
author = jo.getStringOrNull("writingAuthorName"),
authors = setOfNotNull(author),
description = jo.getString("synopsis"),
// I don't think the API provides this info
state = null,
@ -132,116 +144,82 @@ internal abstract class LineWebtoonsParser(
}
}
override suspend fun getList(offset: Int, filter: MangaListFilter?): List<Manga> {
val manga =
when (filter) {
is MangaListFilter.Search -> {
makeRequest("/lineWebtoon/webtoon/searchChallenge?query=${filter.query.urlEncoded()}&startIndex=${offset + 1}&pageSize=20")
.getJSONObject("challengeSearch")
.getJSONArray("titleList")
.mapJSON { jo ->
val titleNo = jo.getLong("titleNo")
Manga(
id = generateUid(titleNo),
title = jo.getString("title"),
altTitle = null,
url = titleNo.toString(),
publicUrl = "https://$domain/$languageCode/canvas/a/list?title_no=$titleNo",
rating = RATING_UNKNOWN,
isNsfw = isNsfwSource,
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
largeCoverUrl = null,
tags = emptySet(),
author = jo.getStringOrNull("writingAuthorName"),
description = null,
state = null,
source = source,
)
}
}
is MangaListFilter.Advanced -> {
override suspend fun getList(offset: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val manga = when {
!filter.query.isNullOrEmpty() -> {
makeRequest("/lineWebtoon/webtoon/searchChallenge?query=${filter.query.urlEncoded()}&startIndex=${offset + 1}&pageSize=20")
.getJSONObject("challengeSearch")
.getJSONArray("titleList")
.mapJSON { jo ->
val titleNo = jo.getLong("titleNo")
val author = jo.getStringOrNull("writingAuthorName")
Manga(
id = generateUid(titleNo),
title = jo.getString("title"),
altTitles = emptySet(),
url = titleNo.toString(),
publicUrl = "https://$domain/$languageCode/canvas/a/list?title_no=$titleNo",
rating = RATING_UNKNOWN,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
largeCoverUrl = null,
tags = emptySet(),
authors = setOfNotNull(author),
description = null,
state = null,
source = source,
)
}
}
val genre = filter.tags.oneOrThrowIfMany()?.key ?: "ALL"
else -> {
val sortOrderStr = when (filter.sortOrder) {
SortOrder.UPDATED -> "UPDATE"
SortOrder.POPULARITY -> "READ_COUNT"
SortOrder.RATING -> "LIKEIT"
else -> throw IllegalArgumentException("Unsupported sort order: ${filter.sortOrder}")
}
val genre = filter.tags.oneOrThrowIfMany()?.key ?: "ALL"
val result =
makeRequest("/lineWebtoon/webtoon/challengeGenreTitleList.json?genre=$genre&sortOrder=$sortOrderStr&startIndex=${offset + 1}&pageSize=20")
val genres = result.getJSONObject("genreList")
.getJSONArray("challengeGenres")
.mapJSON { jo -> parseTag(jo) }
.associateBy { tag -> tag.key }
result
.getJSONObject("titleList")
.getJSONArray("titles")
.mapJSON { jo ->
val titleNo = jo.getLong("titleNo")
Manga(
id = generateUid(titleNo),
title = jo.getString("title"),
altTitle = null,
url = titleNo.toString(),
publicUrl = "https://$domain/$languageCode/canvas/a/list?title_no=$titleNo",
rating = jo.getFloatOrDefault("starScoreAverage", -10f) / 10f,
isNsfw = jo.getBooleanOrDefault("ageGradeNotice", isNsfwSource),
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
largeCoverUrl = jo.getStringOrNull("thumbnailVertical")?.toAbsoluteUrl(staticDomain),
tags = setOfNotNull(genres[jo.getString("representGenre")]),
author = jo.getStringOrNull("writingAuthorName"),
description = jo.getString("synopsis"),
// I don't think the API provides this info
state = null,
source = source,
)
}
val sortOrderStr = when (order) {
SortOrder.UPDATED -> "UPDATE"
SortOrder.POPULARITY -> "READ_COUNT"
SortOrder.RATING -> "LIKEIT"
else -> throw IllegalArgumentException("Unsupported sort order: $order")
}
null -> {
val result =
makeRequest("/lineWebtoon/webtoon/challengeGenreTitleList.json?genre=ALL&sortOrder=UPDATE&startIndex=${offset + 1}&pageSize=20")
val genres = result.getJSONObject("genreList")
.getJSONArray("challengeGenres")
.mapJSON { jo -> parseTag(jo) }
.associateBy { tag -> tag.key }
result
.getJSONObject("titleList")
.getJSONArray("titles")
.mapJSON { jo ->
val titleNo = jo.getLong("titleNo")
Manga(
id = generateUid(titleNo),
title = jo.getString("title"),
altTitle = null,
url = titleNo.toString(),
publicUrl = "https://$domain/$languageCode/canvas/a/list?title_no=$titleNo",
rating = jo.getFloatOrDefault("starScoreAverage", -10f) / 10f,
isNsfw = jo.getBooleanOrDefault("ageGradeNotice", isNsfwSource),
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
largeCoverUrl = jo.getStringOrNull("thumbnailVertical")?.toAbsoluteUrl(staticDomain),
tags = setOfNotNull(genres[jo.getString("representGenre")]),
author = jo.getStringOrNull("writingAuthorName"),
description = jo.getString("synopsis"),
// I don't think the API provides this info
state = null,
source = source,
)
}
}
val result =
makeRequest("/lineWebtoon/webtoon/challengeGenreTitleList.json?genre=$genre&sortOrder=$sortOrderStr&startIndex=${offset + 1}&pageSize=20")
val genres = result.getJSONObject("genreList")
.getJSONArray("challengeGenres")
.mapJSON { jo -> parseTag(jo) }
.associateBy { tag -> tag.key }
result
.getJSONObject("titleList")
.getJSONArray("titles")
.mapJSON { jo ->
val titleNo = jo.getLong("titleNo")
val isNsfwSource = jo.getBooleanOrDefault("ageGradeNotice", isNsfwSource)
val author = jo.getStringOrNull("writingAuthorName")
Manga(
id = generateUid(titleNo),
title = jo.getString("title"),
altTitles = emptySet(),
url = titleNo.toString(),
publicUrl = "https://$domain/$languageCode/canvas/a/list?title_no=$titleNo",
rating = jo.getFloatOrDefault("starScoreAverage", -10f) / 10f,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
largeCoverUrl = jo.getStringOrNull("thumbnailVertical")?.toAbsoluteUrl(staticDomain),
tags = setOfNotNull(genres[jo.getString("representGenre")]),
authors = setOfNotNull(author),
description = jo.getString("synopsis"),
// I don't think the API provides this info
state = null,
source = source,
)
}
}
}
return manga
@ -263,6 +241,11 @@ internal abstract class LineWebtoonsParser(
}
}
override suspend fun resolveLink(resolver: LinkResolver, link: HttpUrl): Manga? {
val titleNo = link.queryParameter("title_no") ?: return null
return resolver.resolveManga(this, url = titleNo)
}
private fun parseTag(jo: JSONObject): MangaTag {
return MangaTag(
title = jo.getString("name"),
@ -271,7 +254,7 @@ internal abstract class LineWebtoonsParser(
)
}
override suspend fun getAvailableTags(): Set<MangaTag> {
private suspend fun fetchAvailableTags(): Set<MangaTag> {
return makeRequest("/lineWebtoon/webtoon/challengeGenreList.json")
.getJSONObject("genreList")
.getJSONArray("challengeGenres")
@ -307,25 +290,25 @@ internal abstract class LineWebtoonsParser(
}
@MangaSourceParser("LINEWEBTOONS_EN", "LineWebtoons English", "en", type = ContentType.MANGA)
class English(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaSource.LINEWEBTOONS_EN)
class English(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaParserSource.LINEWEBTOONS_EN)
@MangaSourceParser("LINEWEBTOONS_ZH", "LineWebtoons Chinese", "zh", type = ContentType.MANGA)
class Chinese(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaSource.LINEWEBTOONS_ZH)
class Chinese(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaParserSource.LINEWEBTOONS_ZH)
@MangaSourceParser("LINEWEBTOONS_TH", "LineWebtoons Thai", "th", type = ContentType.MANGA)
class Thai(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaSource.LINEWEBTOONS_TH)
class Thai(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaParserSource.LINEWEBTOONS_TH)
@MangaSourceParser("LINEWEBTOONS_ID", "LineWebtoons Indonesian", "id", type = ContentType.MANGA)
class Indonesian(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaSource.LINEWEBTOONS_ID)
class Indonesian(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaParserSource.LINEWEBTOONS_ID)
@MangaSourceParser("LINEWEBTOONS_ES", "LineWebtoons Spanish", "es", type = ContentType.MANGA)
class Spanish(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaSource.LINEWEBTOONS_ES)
class Spanish(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaParserSource.LINEWEBTOONS_ES)
@MangaSourceParser("LINEWEBTOONS_FR", "LineWebtoons French", "fr", type = ContentType.MANGA)
class French(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaSource.LINEWEBTOONS_FR)
class French(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaParserSource.LINEWEBTOONS_FR)
@MangaSourceParser("LINEWEBTOONS_DE", "LineWebtoons German", "de", type = ContentType.MANGA)
class German(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaSource.LINEWEBTOONS_DE)
class German(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaParserSource.LINEWEBTOONS_DE)
private inner class WebtoonsUrlSigner(private val secret: String) {

@ -4,13 +4,21 @@ import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.async
import kotlinx.coroutines.awaitAll
import kotlinx.coroutines.coroutineScope
import okhttp3.HttpUrl
import org.json.JSONArray
import org.json.JSONObject
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaParser
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.FlexibleMangaParser
import org.koitharu.kotatsu.parsers.exception.ParseException
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.model.search.MangaSearchQuery
import org.koitharu.kotatsu.parsers.model.search.MangaSearchQueryCapabilities
import org.koitharu.kotatsu.parsers.model.search.QueryCriteria.*
import org.koitharu.kotatsu.parsers.model.search.SearchCapability
import org.koitharu.kotatsu.parsers.model.search.SearchableField
import org.koitharu.kotatsu.parsers.model.search.SearchableField.*
import org.koitharu.kotatsu.parsers.util.*
import org.koitharu.kotatsu.parsers.util.json.*
import java.text.SimpleDateFormat
@ -22,168 +30,264 @@ private const val CHAPTERS_MAX_PAGE_SIZE = 500
private const val CHAPTERS_PARALLELISM = 3
private const val CHAPTERS_MAX_COUNT = 10_000 // strange api behavior, looks like a bug
private const val LOCALE_FALLBACK = "en"
private const val SERVER_DATA = "data"
private const val SERVER_DATA_SAVER = "data-saver"
@MangaSourceParser("MANGADEX", "MangaDex")
internal class MangaDexParser(context: MangaLoaderContext) : MangaParser(context, MangaSource.MANGADEX) {
internal class MangaDexParser(context: MangaLoaderContext) : FlexibleMangaParser(context, MangaParserSource.MANGADEX) {
override val configKeyDomain = ConfigKey.Domain("mangadex.org")
override val availableSortOrders: EnumSet<SortOrder> = EnumSet.allOf(SortOrder::class.java)
private val preferredServerKey = ConfigKey.PreferredImageServer(
presetValues = mapOf(
SERVER_DATA to "Original quality",
SERVER_DATA_SAVER to "Compressed quality",
),
defaultValue = SERVER_DATA,
)
override val availableContentRating: Set<ContentRating> = EnumSet.allOf(ContentRating::class.java)
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
keys.add(preferredServerKey)
}
override val availableSortOrders: EnumSet<SortOrder> = EnumSet.of(
SortOrder.UPDATED,
SortOrder.UPDATED_ASC,
SortOrder.POPULARITY,
SortOrder.POPULARITY_ASC,
SortOrder.RATING,
SortOrder.RATING_ASC,
SortOrder.NEWEST,
SortOrder.NEWEST_ASC,
SortOrder.ALPHABETICAL,
SortOrder.ALPHABETICAL_DESC,
SortOrder.ADDED,
SortOrder.ADDED_ASC,
SortOrder.RELEVANCE,
)
override val searchQueryCapabilities: MangaSearchQueryCapabilities
get() = MangaSearchQueryCapabilities(
SearchCapability(
field = TAG,
criteriaTypes = setOf(Include::class, Exclude::class),
isMultiple = true,
),
SearchCapability(
field = TITLE_NAME,
criteriaTypes = setOf(Match::class),
isMultiple = false,
),
SearchCapability(
field = STATE,
criteriaTypes = setOf(Include::class),
isMultiple = true,
),
SearchCapability(
field = AUTHOR,
criteriaTypes = setOf(Include::class),
isMultiple = true,
),
SearchCapability(
field = CONTENT_TYPE,
criteriaTypes = setOf(Include::class),
isMultiple = true,
),
SearchCapability(
field = CONTENT_RATING,
criteriaTypes = setOf(Include::class),
isMultiple = true,
),
SearchCapability(
field = DEMOGRAPHIC,
criteriaTypes = setOf(Include::class),
isMultiple = true,
),
SearchCapability(
field = ORIGINAL_LANGUAGE,
criteriaTypes = setOf(Include::class),
isMultiple = true,
),
SearchCapability(
field = LANGUAGE,
criteriaTypes = setOf(Include::class),
isMultiple = true,
),
SearchCapability(
field = PUBLICATION_YEAR,
criteriaTypes = setOf(Match::class),
isMultiple = false,
),
)
override suspend fun getFilterOptions(): MangaListFilterOptions = coroutineScope {
val localesDeferred = async { fetchAvailableLocales() }
val tagsDeferred = async { fetchAvailableTags() }
MangaListFilterOptions(
availableTags = tagsDeferred.await(),
availableStates = EnumSet.of(
MangaState.ONGOING,
MangaState.FINISHED,
MangaState.PAUSED,
MangaState.ABANDONED,
),
availableContentRating = EnumSet.allOf(ContentRating::class.java),
availableDemographics = EnumSet.of(
Demographic.SHOUNEN,
Demographic.SHOUJO,
Demographic.SEINEN,
Demographic.JOSEI,
Demographic.NONE,
),
availableLocales = localesDeferred.await(),
)
}
private fun SearchableField.toParamName(): String = when (this) {
TITLE_NAME -> "title"
TAG -> "includedTags[]"
AUTHOR -> "authors[]"
STATE -> "status[]"
CONTENT_TYPE -> "contentType[]"
CONTENT_RATING -> "contentRating[]"
DEMOGRAPHIC -> "publicationDemographic[]"
ORIGINAL_LANGUAGE -> "originalLanguage[]"
LANGUAGE -> "availableTranslatedLanguage[]"
PUBLICATION_YEAR -> "year"
}
private fun Any?.toQueryParam(): String = when (this) {
is String -> urlEncoded()
is Locale -> if (language == "in") "id" else language
is MangaTag -> key
is MangaState -> when (this) {
MangaState.ONGOING -> "ongoing"
MangaState.FINISHED -> "completed"
MangaState.ABANDONED -> "cancelled"
MangaState.PAUSED -> "hiatus"
else -> ""
}
is ContentRating -> when (this) {
ContentRating.SAFE -> "safe"
// quick fix for double value
ContentRating.SUGGESTIVE -> "suggestive&contentRating[]=erotica"
ContentRating.ADULT -> "pornographic"
}
override val availableStates: Set<MangaState> =
EnumSet.of(MangaState.ONGOING, MangaState.FINISHED, MangaState.PAUSED, MangaState.ABANDONED)
is Demographic -> when (this) {
Demographic.SHOUNEN -> "shounen"
Demographic.SHOUJO -> "shoujo"
Demographic.SEINEN -> "seinen"
Demographic.JOSEI -> "josei"
Demographic.NONE -> "none"
else -> ""
}
override val isTagsExclusionSupported: Boolean = true
is SortOrder -> when (this) {
SortOrder.UPDATED -> "[latestUploadedChapter]=desc"
SortOrder.UPDATED_ASC -> "[latestUploadedChapter]=asc"
SortOrder.RATING -> "[rating]=desc"
SortOrder.RATING_ASC -> "[rating]=asc"
SortOrder.ALPHABETICAL -> "[title]=asc"
SortOrder.ALPHABETICAL_DESC -> "[title]=desc"
SortOrder.NEWEST -> "[year]=desc"
SortOrder.NEWEST_ASC -> "[year]=asc"
SortOrder.POPULARITY -> "[followedCount]=desc"
SortOrder.POPULARITY_ASC -> "[followedCount]=asc"
SortOrder.ADDED -> "[createdAt]=desc"
SortOrder.ADDED_ASC -> "[createdAt]=asc"
SortOrder.RELEVANCE -> "&order[relevance]=desc"
else -> "[latestUploadedChapter]=desc"
}
else -> this.toString().urlEncoded()
}
override suspend fun getList(offset: Int, filter: MangaListFilter?): List<Manga> {
val domain = domain
private fun StringBuilder.appendCriterion(field: SearchableField, value: Any?, paramName: String? = null) {
val param = paramName ?: field.toParamName()
if (param.isNotBlank()) {
append("&$param=")
append(value.toQueryParam())
}
}
override suspend fun getList(query: MangaSearchQuery): List<Manga> {
val url = buildString {
append("https://api.")
append(domain)
append("/manga?limit=")
append(PAGE_SIZE)
append("&offset=")
append(offset)
append("&includes[]=cover_art&includes[]=author&includes[]=artist")
when (filter) {
is MangaListFilter.Search -> {
append("&title=")
append(filter.query)
}
append("https://api.$domain/manga?limit=$PAGE_SIZE&offset=${query.offset}")
.append("&includes[]=cover_art&includes[]=author&includes[]=artist&includedTagsMode=AND&excludedTagsMode=OR")
is MangaListFilter.Advanced -> {
filter.tags.forEach {
append("&includedTags[]=")
append(it.key)
}
var hasContentRating = false
filter.tagsExclude.forEach {
append("&excludedTags[]=")
append(it.key)
query.criteria.forEach { criterion ->
when (criterion) {
is Include<*> -> {
if (criterion.field == CONTENT_RATING) {
hasContentRating = true
}
criterion.values.forEach { appendCriterion(criterion.field, it) }
}
if (filter.contentRating.isNotEmpty()) {
filter.contentRating.forEach {
when (it) {
ContentRating.SAFE -> append("&contentRating[]=safe")
ContentRating.SUGGESTIVE -> append("&contentRating[]=suggestive&contentRating[]=erotica")
ContentRating.ADULT -> append("&contentRating[]=pornographic")
}
}
is Exclude<*> -> {
criterion.values.forEach { appendCriterion(criterion.field, it, "excludedTags[]") }
}
append("&order")
append(
when (filter.sortOrder) {
SortOrder.UPDATED -> "[latestUploadedChapter]=desc"
SortOrder.RATING -> "[rating]=desc"
SortOrder.ALPHABETICAL -> "[title]=asc"
SortOrder.ALPHABETICAL_DESC -> "[title]=desc"
SortOrder.NEWEST -> "[createdAt]=desc"
SortOrder.POPULARITY -> "[followedCount]=desc"
},
)
filter.states.forEach {
append("&status[]=")
when (it) {
MangaState.ONGOING -> append("ongoing")
MangaState.FINISHED -> append("completed")
MangaState.ABANDONED -> append("cancelled")
MangaState.PAUSED -> append("hiatus")
else -> append("")
}
is Match<*> -> {
appendCriterion(criterion.field, criterion.value)
}
filter.locale?.let {
append("&availableTranslatedLanguage[]=")
append(it.language)
else -> {
// Not supported
}
}
}
null -> {
append("&order[latestUploadedChapter]=desc")
}
// If contentRating is not provided, add default values
if (!hasContentRating) {
append("&contentRating[]=safe&contentRating[]=suggestive&contentRating[]=erotica&contentRating[]=pornographic")
}
append("&order")
append((query.order ?: defaultSortOrder).toQueryParam())
}
val json = webClient.httpGet(url).parseJson().getJSONArray("data")
return json.mapJSON { jo ->
val id = jo.getString("id")
val attrs = jo.getJSONObject("attributes")
val relations = jo.getJSONArray("relationships").associateByKey("type")
val cover = relations["cover_art"]
?.getJSONObject("attributes")
?.getString("fileName")
?.let {
"https://uploads.$domain/covers/$id/$it"
}
Manga(
id = generateUid(id),
title = requireNotNull(attrs.getJSONObject("title").selectByLocale()) {
"Title should not be null"
},
altTitle = attrs.optJSONObject("altTitles")?.selectByLocale(),
url = id,
publicUrl = "https://$domain/title/$id",
rating = RATING_UNKNOWN,
isNsfw = when (attrs.getStringOrNull("contentRating")) {
"erotica", "pornographic" -> true
else -> false
},
coverUrl = cover?.plus(".256.jpg").orEmpty(),
largeCoverUrl = cover,
description = attrs.optJSONObject("description")?.selectByLocale(),
tags = attrs.getJSONArray("tags").mapJSONToSet { tag ->
MangaTag(
title = tag.getJSONObject("attributes")
.getJSONObject("name")
.firstStringValue()
.toTitleCase(),
key = tag.getString("id"),
source = source,
)
},
state = when (attrs.getStringOrNull("status")) {
"ongoing" -> MangaState.ONGOING
"completed" -> MangaState.FINISHED
"hiatus" -> MangaState.PAUSED
"cancelled" -> MangaState.ABANDONED
else -> null
},
author = (relations["author"] ?: relations["artist"])
?.getJSONObject("attributes")
?.getStringOrNull("name"),
source = source,
)
}
return json.mapJSON { jo -> jo.fetchManga(null) }
}
override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
val domain = domain
override suspend fun getDetails(manga: Manga): Manga {
val mangaId = manga.url.removePrefix("/")
val attrsDeferred = async {
return getDetails(mangaId)
}
override suspend fun resolveLink(resolver: LinkResolver, link: HttpUrl): Manga? {
val regex = Regex("[0-9a-f\\-]{10,}", RegexOption.IGNORE_CASE)
val mangaId = link.pathSegments.find { regex.matches(it) } ?: return null
return getDetails(mangaId)
}
private suspend fun getDetails(mangaId: String): Manga = coroutineScope {
val jsonDeferred = async {
webClient.httpGet(
"https://api.$domain/manga/${mangaId}?includes[]=artist&includes[]=author&includes[]=cover_art",
).parseJson().getJSONObject("data").getJSONObject("attributes")
).parseJson().getJSONObject("data")
}
val feedDeferred = async { loadChapters(mangaId) }
val mangaAttrs = attrsDeferred.await()
val feed = feedDeferred.await()
manga.copy(
description = mangaAttrs.optJSONObject("description")?.selectByLocale()
?: manga.description,
chapters = mapChapters(feed),
)
jsonDeferred.await().fetchManga(mapChapters(feedDeferred.await()))
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val domain = domain
val chapterJson = webClient.httpGet("https://api.$domain/at-home/server/${chapter.url}?forcePort443=false")
.parseJson()
.getJSONObject("chapter")
val pages = chapterJson.getJSONArray("data")
val prefix = "https://uploads.$domain/data/${chapterJson.getString("hash")}/"
val json = webClient.httpGet(
"https://api.$domain/at-home/server/${chapter.url}?forcePort443=false",
).parseJson()
val chapterJson = json.getJSONObject("chapter")
val server = config[preferredServerKey] ?: SERVER_DATA
val pages = chapterJson.getJSONArray(
if (server == SERVER_DATA_SAVER) "dataSaver" else "data",
)
val prefix = "${json.getString("baseUrl")}/$server/${chapterJson.getString("hash")}/"
return List(pages.length()) { i ->
val url = prefix + pages.getString(i)
MangaPage(
@ -195,7 +299,7 @@ internal class MangaDexParser(context: MangaLoaderContext) : MangaParser(context
}
}
override suspend fun getAvailableTags(): Set<MangaTag> {
private suspend fun fetchAvailableTags(): Set<MangaTag> {
val tags = webClient.httpGet("https://api.${domain}/manga/tag").parseJson()
.getJSONArray("data")
return tags.mapJSONToSet { jo ->
@ -209,7 +313,7 @@ internal class MangaDexParser(context: MangaLoaderContext) : MangaParser(context
}
}
override suspend fun getAvailableLocales(): Set<Locale> {
private suspend fun fetchAvailableLocales(): Set<Locale> {
val head = webClient.httpGet("https://$domain/").parseHtml().head()
return head.getElementsByAttributeValue("property", "og:locale:alternate")
.mapNotNullToSet { meta ->
@ -218,7 +322,64 @@ internal class MangaDexParser(context: MangaLoaderContext) : MangaParser(context
}
}
private fun JSONObject.firstStringValue() = values().next() as String
private fun JSONObject.fetchManga(chapters: List<MangaChapter>?): Manga {
val id = getString("id")
val attrs = getJSONObject("attributes")
val relations = getJSONArray("relationships").associateByKey("type")
val cover = relations["cover_art"]
?.firstOrNull()
?.getJSONObject("attributes")
?.getString("fileName")
?.let {
"https://uploads.$domain/covers/$id/$it"
}
val authors: Set<String> = (relations["author"] ?: relations["artist"])
?.mapNotNullToSet {
it.getJSONObject("attributes")?.getStringOrNull("name")
}.orEmpty()
return Manga(
id = generateUid(id),
title = requireNotNull(attrs.getJSONObject("title").selectByLocale()) {
"Title should not be null"
},
altTitles = setOfNotNull(attrs.optJSONArray("altTitles")?.flatten()?.selectByLocale()), // TODO
url = id,
publicUrl = "https://$domain/title/$id",
rating = RATING_UNKNOWN,
contentRating = when (attrs.getStringOrNull("contentRating")) {
"pornographic" -> ContentRating.ADULT
"erotica", "suggestive" -> ContentRating.SUGGESTIVE
"safe" -> ContentRating.SAFE
else -> null
},
coverUrl = cover?.plus(".256.jpg"),
largeCoverUrl = cover,
description = attrs.optJSONObject("description")?.selectByLocale(),
tags = attrs.getJSONArray("tags").mapJSONToSet { tag ->
MangaTag(
title = tag.getJSONObject("attributes")
.getJSONObject("name")
.firstStringValue()
.toTitleCase(),
key = tag.getString("id"),
source = source,
)
},
state = when (attrs.getStringOrNull("status")) {
"ongoing" -> MangaState.ONGOING
"completed" -> MangaState.FINISHED
"hiatus" -> MangaState.PAUSED
"cancelled" -> MangaState.ABANDONED
else -> null
},
authors = authors,
chapters = chapters,
source = source,
)
}
private fun JSONObject.firstStringValue() = entries<String>().first().value
private fun JSONObject.selectByLocale(): String? {
val preferredLocales = context.getPreferredLocales()
@ -226,7 +387,20 @@ internal class MangaDexParser(context: MangaLoaderContext) : MangaParser(context
getStringOrNull(locale.language)?.let { return it }
getStringOrNull(locale.toLanguageTag())?.let { return it }
}
return getStringOrNull(LOCALE_FALLBACK) ?: values().nextOrNull() as? String
return getStringOrNull(LOCALE_FALLBACK) ?: entries<String>().firstOrNull()?.value?.nullIfEmpty()
}
private fun JSONArray.flatten(): JSONObject {
val result = JSONObject()
repeat(length()) { i ->
val jo = optJSONObject(i)
if (jo != null) {
for (key in jo.keys()) {
result.put(key, jo.get(key))
}
}
}
return result
}
private suspend fun loadChapters(mangaId: String): List<JSONObject> {
@ -272,7 +446,7 @@ internal class MangaDexParser(context: MangaLoaderContext) : MangaParser(context
val json = webClient.httpGet(url).parseJson()
if (json.getString("result") == "ok") {
return Chapters(
data = json.optJSONArray("data")?.toJSONList().orEmpty(),
data = json.optJSONArray("data")?.asTypedList<JSONObject>().orEmpty(),
total = json.getInt("total"),
)
} else {
@ -290,41 +464,52 @@ internal class MangaDexParser(context: MangaLoaderContext) : MangaParser(context
Locale.ROOT,
)
val chaptersBuilder = ChaptersListBuilder(list.size)
val branchedChapters = HashMap<String?, HashMap<Float, MangaChapter>>()
val branchedChapters = HashMap<String?, HashMap<Pair<Int, Float>, MangaChapter>>()
for (jo in list) {
val id = jo.getString("id")
val attrs = jo.getJSONObject("attributes")
if (!attrs.isNull("externalUrl")) {
continue
}
val number = jo.getJSONObject("attributes").getFloatOrDefault("chapter", 0f)
val number = attrs.getFloatOrDefault("chapter", 0f)
val volume = attrs.getIntOrDefault("volume", 0)
val locale = attrs.getStringOrNull("translatedLanguage")?.let { Locale.forLanguageTag(it) }
val lc = locale?.getDisplayName(locale)?.toTitleCase(locale)
val relations = jo.getJSONArray("relationships").associateByKey("type")
val team = relations["scanlation_group"]?.getJSONObject("attributes")?.getStringOrNull("name")
?.takeUnless { it.isBlank() }
val team =
relations["scanlation_group"]?.firstOrNull()?.optJSONObject("attributes")?.getStringOrNull("name")
val branch = (list.indices).firstNotNullOf { i ->
val b = if (i == 0) lc else "$lc ($i)"
if (branchedChapters[b]?.get(number) == null) b else null
if (branchedChapters[b]?.get(volume to number) == null) b else null
}
val chapter = MangaChapter(
id = generateUid(id),
name = attrs.getStringOrNull("title")?.takeUnless(String::isEmpty)
?: "Chapter #${number.toString().removeSuffix(".0")}",
number = if (number <= 0f) (branchedChapters[branch]?.size?.plus(1) ?: 0) else number.toInt(),
title = attrs.getStringOrNull("title"),
number = number,
volume = volume,
url = id,
scanlator = team,
uploadDate = dateFormat.tryParse(attrs.getString("publishAt")),
uploadDate = dateFormat.parseSafe(attrs.getString("publishAt")),
branch = branch,
source = source,
)
if (chaptersBuilder.add(chapter)) {
branchedChapters.getOrPut(branch, ::HashMap)[number] = chapter
branchedChapters.getOrPut(branch, ::HashMap)[volume to number] = chapter
}
}
return chaptersBuilder.toList()
}
private fun JSONArray.associateByKey(key: String): Map<String, List<JSONObject>> {
val destination = LinkedHashMap<String, MutableList<JSONObject>>(length())
repeat(length()) { i ->
val item = getJSONObject(i)
val keyValue = item.getString(key)
destination.computeIfAbsent(keyValue) { mutableListOf() }.add(item)
}
return destination
}
private class Chapters(
val data: List<JSONObject>,
val total: Int,

@ -0,0 +1,493 @@
package org.koitharu.kotatsu.parsers.site.all
import kotlinx.coroutines.async
import kotlinx.coroutines.awaitAll
import kotlinx.coroutines.coroutineScope
import okhttp3.HttpUrl.Companion.toHttpUrl
import okhttp3.Interceptor
import okhttp3.Response
import org.jsoup.Jsoup
import org.jsoup.nodes.Document
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaParserAuthProvider
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.bitmap.Rect
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import org.koitharu.kotatsu.parsers.util.suspendlazy.suspendLazy
import java.text.SimpleDateFormat
import java.util.*
import kotlin.math.min
private const val PIECE_SIZE = 200
private const val MIN_SPLIT_COUNT = 5
internal abstract class MangaFireParser(
context: MangaLoaderContext,
source: MangaParserSource,
private val siteLang: String,
) : PagedMangaParser(context, source, 30), Interceptor, MangaParserAuthProvider {
override val configKeyDomain: ConfigKey.Domain = ConfigKey.Domain("mangafire.to")
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.UPDATED,
SortOrder.POPULARITY,
SortOrder.RATING,
SortOrder.NEWEST,
SortOrder.ALPHABETICAL,
SortOrder.RELEVANCE,
)
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override val authUrl: String
get() = "https://${domain}"
override suspend fun isAuthorized(): Boolean {
return context.cookieJar.getCookies(domain).any {
it.value.contains("user")
}
}
override suspend fun getUsername(): String {
val body = webClient.httpGet("https://${domain}/user/profile").parseHtml().body()
return body.selectFirst("form.ajax input[name*=username]")?.attr("value")
?: body.parseFailed("Cannot find username")
}
private val tags = suspendLazy(soft = true) {
webClient.httpGet("https://$domain/filter").parseHtml()
.select(".genres > li").map {
MangaTag(
title = it.selectFirstOrThrow("label").ownText().toTitleCase(sourceLocale),
key = it.selectFirstOrThrow("input").attr("value"),
source = source,
)
}.associateBy { it.title }
}
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isTagsExclusionSupported = true,
isSearchSupported = true,
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = tags.get().values.toSet(),
availableStates = EnumSet.of(
MangaState.ONGOING,
MangaState.FINISHED,
MangaState.ABANDONED,
MangaState.PAUSED,
MangaState.UPCOMING,
),
)
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val url = "https://$domain/filter".toHttpUrl().newBuilder().apply {
addQueryParameter("page", page.toString())
addQueryParameter("language[]", siteLang)
when {
!filter.query.isNullOrEmpty() -> {
val encodedQuery = filter.query.splitByWhitespace().joinToString(separator = "+") { part ->
part.urlEncoded()
}
addEncodedQueryParameter("keyword", encodedQuery)
addQueryParameter(
name = "sort",
value = when (order) {
SortOrder.UPDATED -> "recently_updated"
SortOrder.POPULARITY -> "most_viewed"
SortOrder.RATING -> "scores"
SortOrder.NEWEST -> "release_date"
SortOrder.ALPHABETICAL -> "title_az"
SortOrder.RELEVANCE -> "most_relevance"
else -> ""
},
)
}
else -> {
filter.tagsExclude.forEach { tag ->
addQueryParameter("genre[]", "-${tag.key}")
}
filter.tags.forEach { tag ->
addQueryParameter("genre[]", tag.key)
}
filter.locale?.let {
addQueryParameter("language[]", it.language)
}
filter.states.forEach { state ->
addQueryParameter(
name = "status[]",
value = when (state) {
MangaState.ONGOING -> "releasing"
MangaState.FINISHED -> "completed"
MangaState.ABANDONED -> "discontinued"
MangaState.PAUSED -> "on_hiatus"
MangaState.UPCOMING -> "info"
else -> throw IllegalArgumentException("$state not supported")
},
)
}
addQueryParameter(
name = "sort",
value = when (order) {
SortOrder.UPDATED -> "recently_updated"
SortOrder.POPULARITY -> "most_viewed"
SortOrder.RATING -> "scores"
SortOrder.NEWEST -> "release_date"
SortOrder.ALPHABETICAL -> "title_az"
SortOrder.RELEVANCE -> "most_relevance"
else -> ""
},
)
}
}
}.build()
return webClient.httpGet(url)
.parseHtml().parseMangaList()
}
private fun Document.parseMangaList(): List<Manga> {
return select(".original.card-lg .unit .inner").map {
val a = it.selectFirstOrThrow(".info > a")
val mangaUrl = a.attrAsRelativeUrl("href")
Manga(
id = generateUid(mangaUrl),
url = mangaUrl,
publicUrl = mangaUrl.toAbsoluteUrl(domain),
title = a.ownText(),
coverUrl = it.selectFirstOrThrow("img").attrAsAbsoluteUrl("src"),
source = source,
altTitles = emptySet(),
largeCoverUrl = null,
authors = emptySet(),
contentRating = null,
rating = RATING_UNKNOWN,
state = null,
tags = emptySet(),
)
}
}
override suspend fun getDetails(manga: Manga): Manga {
val document = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val availableTags = tags.get()
var isAdult = false
var isSuggestive = false
val author = document.select("div.meta a[href*=/author/]")
.joinToString { it.ownText() }.nullIfEmpty()
return manga.copy(
title = document.selectFirstOrThrow(".info > h1").ownText(),
altTitles = setOfNotNull(document.selectFirst(".info > h6")?.ownTextOrNull()),
rating = document.selectFirst("div.rating-box")?.attr("data-score")
?.toFloatOrNull()?.div(10) ?: RATING_UNKNOWN,
coverUrl = document.selectFirstOrThrow("div.manga-detail div.poster img")
.attrAsAbsoluteUrl("src"),
tags = document.select("div.meta a[href*=/genre/]").mapNotNullToSet {
val tag = it.ownText()
if (tag == "Hentai") {
isAdult = true
} else if (tag == "Ecchi") {
isSuggestive = true
}
availableTags[tag.toTitleCase(sourceLocale)]
},
contentRating = when {
isAdult -> ContentRating.ADULT
isSuggestive -> ContentRating.SUGGESTIVE
else -> ContentRating.SAFE
},
state = document.selectFirst(".info > p")?.ownText()?.let {
when (it.lowercase()) {
"releasing" -> MangaState.ONGOING
"completed" -> MangaState.FINISHED
"discontinued" -> MangaState.ABANDONED
"on_hiatus" -> MangaState.PAUSED
"info" -> MangaState.UPCOMING
else -> null
}
},
authors = setOfNotNull(author),
description = document.selectFirstOrThrow("#synopsis div.modal-content").html(),
chapters = getChapters(manga.url, document),
)
}
private data class ChapterBranch(
val type: String,
val langCode: String,
val langTitle: String,
)
private suspend fun getChapters(mangaUrl: String, document: Document): List<MangaChapter> {
val availableTypes = document.select(".chapvol-tab > a").map {
it.attr("data-name")
}
val langTypePairs = document.select(".m-list div.tab-content").flatMap {
val type = it.attr("data-name")
it.select(".list-menu .dropdown-item").map { item ->
ChapterBranch(
type = type,
langCode = item.attr("data-code").lowercase(),
langTitle = item.attr("data-title"),
)
}
}.filter {
it.langCode == siteLang && availableTypes.contains(it.type)
}
val id = mangaUrl.substringAfterLast('.')
return coroutineScope {
langTypePairs.map {
async {
getChaptersBranch(id, it)
}
}.awaitAll().flatten()
}
}
private suspend fun getChaptersBranch(mangaId: String, branch: ChapterBranch): List<MangaChapter> {
val chapterElements = webClient
.httpGet("https://$domain/ajax/read/$mangaId/${branch.type}/${branch.langCode}")
.parseJson()
.getJSONObject("result")
.getString("html")
.let(Jsoup::parseBodyFragment)
.select("ul li a")
if (branch.type == "chapter") {
val doc = webClient
.httpGet("https://$domain/ajax/manga/$mangaId/${branch.type}/${branch.langCode}")
.parseJson()
.getString("result")
.let(Jsoup::parseBodyFragment)
doc.select("ul li a").withIndex().forEach { (i, it) ->
val date = it.select("span")[1].ownText()
chapterElements[i].attr("upload-date", date)
chapterElements[i].attr("other-title", it.attr("title"))
}
}
return chapterElements.mapChapters(reversed = true) { _, it ->
MangaChapter(
id = generateUid(it.attr("href")),
title = it.attr("title").ifBlank {
"${branch.type.toTitleCase()} ${it.attr("data-number")}"
},
number = it.attr("data-number").toFloat(),
volume = it.attr("other-title").let {
volumeNumRegex.find(it)?.groupValues?.getOrNull(2)?.toInt() ?: 0
},
url = "${branch.type}/${it.attr("data-id")}",
scanlator = null,
uploadDate = dateFormat.parseSafe(it.attr("upload-date")),
branch = "${branch.langTitle} ${branch.type.toTitleCase()}",
source = source,
)
}
}
private val dateFormat = SimpleDateFormat("MMM dd, yyyy", Locale.ENGLISH)
private val volumeNumRegex = Regex("""vol(ume)?\s*(\d+)""", RegexOption.IGNORE_CASE)
override suspend fun getRelatedManga(seed: Manga): List<Manga> = coroutineScope {
val document = webClient.httpGet(seed.url.toAbsoluteUrl(domain)).parseHtml()
val total = document.select(
"section.m-related a[href*=/manga/], .side-manga:not(:has(.head:contains(trending))) .unit",
).size
val mangas = ArrayList<Manga>(total)
// "Related Manga"
document.select("section.m-related a[href*=/manga/]").map {
async {
val url = it.attrAsRelativeUrl("href")
val mangaDocument = webClient
.httpGet(url.toAbsoluteUrl(domain))
.parseHtml()
val chaptersInManga = mangaDocument.select(".m-list div.tab-content .list-menu .dropdown-item")
.map { it.attr("data-code").lowercase() }
if (!chaptersInManga.contains(siteLang)) {
return@async null
}
Manga(
id = generateUid(url),
url = url,
publicUrl = url.toAbsoluteUrl(domain),
title = it.ownText(),
coverUrl = mangaDocument.selectFirstOrThrow("div.manga-detail div.poster img")
.attrAsAbsoluteUrl("src"),
source = source,
altTitles = emptySet(),
largeCoverUrl = null,
authors = emptySet(),
contentRating = null,
rating = RATING_UNKNOWN,
state = null,
tags = emptySet(),
)
}
}.awaitAll()
.filterNotNullTo(mangas)
// "You may also like"
document.select(".side-manga:not(:has(.head:contains(trending))) .unit").forEach {
val url = it.attrAsRelativeUrl("href")
mangas.add(
Manga(
id = generateUid(url),
url = url,
publicUrl = url.toAbsoluteUrl(domain),
title = it.selectFirstOrThrow(".info h6").ownText(),
coverUrl = it.selectFirstOrThrow(".poster img").attrAsAbsoluteUrl("src"),
source = source,
altTitles = emptySet(),
largeCoverUrl = null,
authors = emptySet(),
contentRating = null,
rating = RATING_UNKNOWN,
state = null,
tags = emptySet(),
),
)
}
mangas.ifEmpty {
// fallback: author's other works
document.select("div.meta a[href*=/author/]").map {
async {
val url = it.attrAsAbsoluteUrl("href").toHttpUrl()
.newBuilder()
.addQueryParameter("language[]", siteLang)
.build()
webClient.httpGet(url)
.parseHtml().parseMangaList()
}
}.awaitAll().flatten()
}
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val images = webClient
.httpGet("https://$domain/ajax/read/${chapter.url}")
.parseJson()
.getJSONObject("result")
.getJSONArray("images")
val pages = ArrayList<MangaPage>(images.length())
for (i in 0 until images.length()) {
val img = images.getJSONArray(i)
val url = img.getString(0)
val offset = img.getInt(2)
pages.add(
MangaPage(
id = generateUid(url),
url = if (offset < 1) {
url
} else {
"$url#scrambled_$offset"
},
preview = null,
source = source,
),
)
}
return pages
}
override fun intercept(chain: Interceptor.Chain): Response {
val request = chain.request()
val response = chain.proceed(request)
if (request.url.fragment?.startsWith("scrambled") != true) {
return response
}
return context.redrawImageResponse(response) { bitmap ->
val offset = request.url.fragment!!.substringAfter("_").toInt()
val width = bitmap.width
val height = bitmap.height
val result = context.createBitmap(width, height)
val pieceWidth = min(PIECE_SIZE, width.ceilDiv(MIN_SPLIT_COUNT))
val pieceHeight = min(PIECE_SIZE, height.ceilDiv(MIN_SPLIT_COUNT))
val xMax = width.ceilDiv(pieceWidth) - 1
val yMax = height.ceilDiv(pieceHeight) - 1
for (y in 0..yMax) {
for (x in 0..xMax) {
val xDst = pieceWidth * x
val yDst = pieceHeight * y
val w = min(pieceWidth, width - xDst)
val h = min(pieceHeight, height - yDst)
val xSrc = pieceWidth * when (x) {
xMax -> x // margin
else -> (xMax - x + offset) % xMax
}
val ySrc = pieceHeight * when (y) {
yMax -> y // margin
else -> (yMax - y + offset) % yMax
}
val srcRect = Rect(xSrc, ySrc, xSrc + w, ySrc + h)
val dstRect = Rect(xDst, yDst, xDst + w, yDst + h)
result.drawBitmap(bitmap, srcRect, dstRect)
}
}
result
}
}
private fun Int.ceilDiv(other: Int) = (this + (other - 1)) / other
@MangaSourceParser("MANGAFIRE_EN", "MangaFire English", "en")
class English(context: MangaLoaderContext) : MangaFireParser(context, MangaParserSource.MANGAFIRE_EN, "en")
@MangaSourceParser("MANGAFIRE_ES", "MangaFire Spanish", "es")
class Spanish(context: MangaLoaderContext) : MangaFireParser(context, MangaParserSource.MANGAFIRE_ES, "es")
@MangaSourceParser("MANGAFIRE_ESLA", "MangaFire Spanish (Latim)", "es")
class SpanishLatim(context: MangaLoaderContext) :
MangaFireParser(context, MangaParserSource.MANGAFIRE_ESLA, "es-la")
@MangaSourceParser("MANGAFIRE_FR", "MangaFire French", "fr")
class French(context: MangaLoaderContext) : MangaFireParser(context, MangaParserSource.MANGAFIRE_FR, "fr")
@MangaSourceParser("MANGAFIRE_JA", "MangaFire Japanese", "ja")
class Japanese(context: MangaLoaderContext) : MangaFireParser(context, MangaParserSource.MANGAFIRE_JA, "ja")
@MangaSourceParser("MANGAFIRE_PT", "MangaFire Portuguese", "pt")
class Portuguese(context: MangaLoaderContext) : MangaFireParser(context, MangaParserSource.MANGAFIRE_PT, "pt")
@MangaSourceParser("MANGAFIRE_PTBR", "MangaFire Portuguese (Brazil)", "pt")
class PortugueseBR(context: MangaLoaderContext) :
MangaFireParser(context, MangaParserSource.MANGAFIRE_PTBR, "pt-br")
}

@ -4,102 +4,154 @@ import androidx.collection.ArrayMap
import kotlinx.coroutines.coroutineScope
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import org.koitharu.kotatsu.parsers.util.suspendlazy.suspendLazy
import java.text.DateFormat
import java.text.SimpleDateFormat
import java.util.*
@MangaSourceParser("MANGAPARK", "MangaPark")
internal class MangaPark(context: MangaLoaderContext) :
PagedMangaParser(context, MangaSource.MANGAPARK, pageSize = 36) {
PagedMangaParser(context, MangaParserSource.MANGAPARK, pageSize = 36) {
override val availableSortOrders: Set<SortOrder> = EnumSet.allOf(SortOrder::class.java)
override val availableStates: Set<MangaState> = EnumSet.allOf(MangaState::class.java)
override val configKeyDomain = ConfigKey.Domain(
"mangapark.net",
"mangapark.com",
"mangapark.org",
"mangapark.me",
"mangapark.io",
"mangapark.to",
"comicpark.org",
"comicpark.to",
"readpark.org",
"readpark.net",
"parkmanga.com",
"parkmanga.net",
"parkmanga.org",
"mpark.to",
)
override val availableContentRating: Set<ContentRating> = EnumSet.of(ContentRating.SAFE)
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override val isTagsExclusionSupported: Boolean = true
override val availableSortOrders: Set<SortOrder> =
EnumSet.of(SortOrder.POPULARITY, SortOrder.UPDATED, SortOrder.NEWEST, SortOrder.ALPHABETICAL, SortOrder.RATING)
override val configKeyDomain = ConfigKey.Domain("mangapark.net")
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isTagsExclusionSupported = true,
isSearchSupported = true,
isSearchWithFiltersSupported = true,
isOriginalLocaleSupported = true,
)
private val tagsMap = SuspendLazy(::parseTags)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = tagsMap.get().values.toSet(),
availableStates = EnumSet.of(
MangaState.ONGOING,
MangaState.FINISHED,
MangaState.ABANDONED,
MangaState.PAUSED,
MangaState.UPCOMING,
),
availableContentRating = EnumSet.of(ContentRating.SAFE),
availableLocales = setOf(
Locale("af"), Locale("sq"), Locale("am"), Locale("ar"), Locale("hy"),
Locale("az"), Locale("be"), Locale("bn"), Locale("zh_hk"), Locale("zh_tw"),
Locale.CHINESE, Locale("ceb"), Locale("ca"), Locale("km"), Locale("my"),
Locale("bg"), Locale("bs"), Locale("hr"), Locale("cs"), Locale("da"),
Locale("nl"), Locale.ENGLISH, Locale("et"), Locale("fo"), Locale("fil"),
Locale("fi"), Locale("he"), Locale("ha"), Locale("jv"), Locale("lb"),
Locale("mn"), Locale("ro"), Locale("si"), Locale("ta"), Locale("uz"),
Locale("ur"), Locale("tg"), Locale("sd"), Locale("pt_br"), Locale("mo"),
Locale("lt"), Locale.JAPANESE, Locale.ITALIAN, Locale("ht"), Locale("lv"),
Locale("mr"), Locale("pt"), Locale("sn"), Locale("sv"), Locale("uk"),
Locale("tk"), Locale("sw"), Locale("st"), Locale("pl"), Locale("mi"),
Locale("lo"), Locale("ga"), Locale("gu"), Locale("gn"), Locale("id"),
Locale("ky"), Locale("mt"), Locale("fa"), Locale("sh"), Locale("es_419"),
Locale("tr"), Locale("to"), Locale("vi"), Locale("es"), Locale("sr"),
Locale("ps"), Locale("ml"), Locale("ku"), Locale("ig"), Locale("el"),
Locale.GERMAN, Locale("is"), Locale.KOREAN, Locale("ms"), Locale("ny"), Locale("sm"),
Locale("so"), Locale("ti"), Locale("zu"), Locale("yo"), Locale("th"),
Locale("sl"), Locale("ru"), Locale("no"), Locale("mg"), Locale("kk"),
Locale("hu"), Locale("ka"), Locale.FRENCH, Locale("hi"), Locale("kn"),
Locale("mk"), Locale("ne"), Locale("rm"), Locale("sk"), Locale("te"),
),
)
init {
context.cookieJar.insertCookies(domain, "nsfw", "2")
}
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val url = buildString {
append("https://")
append(domain)
append("/search?page=")
append(page.toString())
when (filter) {
is MangaListFilter.Search -> {
append("&word=")
append(filter.query.urlEncoded())
}
filter.query?.let {
append("&word=")
append(filter.query.urlEncoded())
}
is MangaListFilter.Advanced -> {
append("&genres=")
if (filter.tags.isNotEmpty()) {
appendAll(filter.tags, ",") { it.key }
}
append("|")
if (filter.tagsExclude.isNotEmpty()) {
appendAll(filter.tagsExclude, ",") { it.key }
}
if (filter.contentRating.isNotEmpty()) {
filter.contentRating.oneOrThrowIfMany()?.let {
append(
when (it) {
ContentRating.SAFE -> append(",gore,bloody,violence,ecchi,adult,mature,smut,hentai")
else -> append("")
},
)
}
}
filter.states.oneOrThrowIfMany()?.let {
append("&status=")
append(
when (it) {
MangaState.ONGOING -> "ongoing"
MangaState.FINISHED -> "completed"
MangaState.PAUSED -> "hiatus"
MangaState.ABANDONED -> "cancelled"
MangaState.UPCOMING -> "pending"
},
)
}
append("&sortby=")
append(
when (filter.sortOrder) {
SortOrder.POPULARITY -> "views_d000"
SortOrder.UPDATED -> "field_update"
SortOrder.NEWEST -> "field_create"
SortOrder.ALPHABETICAL -> "field_name"
SortOrder.RATING -> "field_score"
else -> ""
append("&genres=")
filter.tags.joinTo(this, ",") { it.key }
append("|")
filter.tagsExclude.joinTo(this, ",") { it.key }
if (filter.contentRating.isNotEmpty()) {
filter.contentRating.oneOrThrowIfMany()?.let {
append(
when (it) {
ContentRating.SAFE -> append(",gore,bloody,violence,ecchi,adult,mature,smut,hentai")
else -> append("")
},
)
filter.locale?.let {
append("&lang=")
append(it.language)
}
}
}
filter.states.oneOrThrowIfMany()?.let {
append("&status=")
append(
when (it) {
MangaState.ONGOING -> "ongoing"
MangaState.FINISHED -> "completed"
MangaState.PAUSED -> "hiatus"
MangaState.ABANDONED -> "cancelled"
MangaState.UPCOMING -> "pending"
else -> throw IllegalArgumentException("$it not supported")
},
)
}
append("&sortby=")
append(
when (order) {
SortOrder.POPULARITY -> "views_d000"
SortOrder.UPDATED -> "field_update"
SortOrder.NEWEST -> "field_create"
SortOrder.ALPHABETICAL -> "field_name"
SortOrder.RATING -> "field_score"
else -> ""
},
)
filter.locale?.let {
append("&lang=")
append(it.language)
}
null -> append("&sortby=field_update")
filter.originalLocale?.let {
append("&orig=")
append(it.language)
}
}
@ -110,22 +162,20 @@ internal class MangaPark(context: MangaLoaderContext) :
id = generateUid(href),
url = href,
publicUrl = href.toAbsoluteUrl(div.host ?: domain),
coverUrl = div.selectFirst("img")?.src().orEmpty(),
coverUrl = div.selectFirst("img")?.src(),
title = div.selectFirst("h3")?.text().orEmpty(),
altTitle = null,
altTitles = emptySet(),
rating = div.selectFirst("span.text-yellow-500")?.text()?.toFloatOrNull()?.div(10F) ?: RATING_UNKNOWN,
tags = emptySet(),
author = null,
authors = emptySet(),
state = null,
source = source,
isNsfw = isNsfwSource,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
)
}
}
override suspend fun getAvailableTags(): Set<MangaTag> {
return tagsMap.get().values.toSet()
}
private val tagsMap = suspendLazy(initializer = ::parseTags)
private suspend fun parseTags(): Map<String, MangaTag> {
val tagElements = webClient.httpGet("https://$domain/search").parseHtml()
@ -143,29 +193,6 @@ internal class MangaPark(context: MangaLoaderContext) :
return tagMap
}
override suspend fun getAvailableLocales(): Set<Locale> = setOf(
Locale("af"), Locale("sq"), Locale("am"), Locale("ar"), Locale("hy"),
Locale("az"), Locale("be"), Locale("bn"), Locale("zh_hk"), Locale("zh_tw"),
Locale.CHINESE, Locale("ceb"), Locale("ca"), Locale("km"), Locale("my"),
Locale("bg"), Locale("bs"), Locale("hr"), Locale("cs"), Locale("da"),
Locale("nl"), Locale.ENGLISH, Locale("et"), Locale("fo"), Locale("fil"),
Locale("fi"), Locale("he"), Locale("ha"), Locale("jv"), Locale("lb"),
Locale("mn"), Locale("ro"), Locale("si"), Locale("ta"), Locale("uz"),
Locale("ur"), Locale("tg"), Locale("sd"), Locale("pt_br"), Locale("mo"),
Locale("lt"), Locale.JAPANESE, Locale.ITALIAN, Locale("ht"), Locale("lv"),
Locale("mr"), Locale("pt"), Locale("sn"), Locale("sv"), Locale("uk"),
Locale("tk"), Locale("sw"), Locale("st"), Locale("pl"), Locale("mi"),
Locale("lo"), Locale("ga"), Locale("gu"), Locale("gn"), Locale("id"),
Locale("ky"), Locale("mt"), Locale("fa"), Locale("sh"), Locale("es_419"),
Locale("tr"), Locale("to"), Locale("vi"), Locale("es"), Locale("sr"),
Locale("ps"), Locale("ml"), Locale("ku"), Locale("ig"), Locale("el"),
Locale.GERMAN, Locale("is"), Locale.KOREAN, Locale("ms"), Locale("ny"), Locale("sm"),
Locale("so"), Locale("ti"), Locale("zu"), Locale("yo"), Locale("th"),
Locale("sl"), Locale("ru"), Locale("no"), Locale("mg"), Locale("kk"),
Locale("hu"), Locale("ka"), Locale.FRENCH, Locale("hi"), Locale("kn"),
Locale("mk"), Locale("ne"), Locale("rm"), Locale("sk"), Locale("te"),
)
override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val tagMap = tagsMap.get()
@ -173,10 +200,11 @@ internal class MangaPark(context: MangaLoaderContext) :
val tags = selectTag.mapNotNullToSet { tagMap[it.text()] }
val nsfw = tags.any { t -> t.key == "hentai" || t.key == "adult" }
val dateFormat = SimpleDateFormat("dd/MM/yyyy", sourceLocale)
val author = doc.selectFirst("div[q:key=tz_4]")?.textOrNull()
manga.copy(
altTitle = doc.selectFirst("div[q:key=tz_2]")?.text().orEmpty(),
author = doc.selectFirst("div[q:key=tz_4]")?.text().orEmpty(),
description = doc.selectFirst("react-island[q:key=0a_9]")?.html().orEmpty(),
altTitles = setOfNotNull(doc.selectFirst("div[q:key=tz_2]")?.textOrNull()),
authors = setOfNotNull(author),
description = doc.selectFirst("react-island[q:key=0a_9]")?.html(),
state = when (doc.selectFirst("span[q:key=Yn_5]")?.text()?.lowercase()) {
"ongoing" -> MangaState.ONGOING
"completed" -> MangaState.FINISHED
@ -185,15 +213,16 @@ internal class MangaPark(context: MangaLoaderContext) :
else -> null
},
tags = tags,
isNsfw = nsfw,
chapters = doc.body().select("div.group.flex div.px-2").mapChapters { i, div ->
contentRating = if (nsfw) ContentRating.ADULT else ContentRating.SAFE,
chapters = doc.body().select("div.group.flex div.px-2").mapChapters(reversed = true) { i, div ->
val a = div.selectFirstOrThrow("a")
val href = a.attrAsRelativeUrl("href")
val dateText = div.selectFirst("span[q:key=Ee_0]")?.text()
MangaChapter(
id = generateUid(href),
name = a.text(),
number = i + 1,
title = a.textOrNull(),
number = i + 1f,
volume = 0,
url = href,
uploadDate = parseChapterDate(
dateFormat,
@ -210,15 +239,20 @@ internal class MangaPark(context: MangaLoaderContext) :
private fun parseChapterDate(dateFormat: DateFormat, date: String?): Long {
val d = date?.lowercase() ?: return 0
return when {
d.endsWith(" ago") -> parseRelativeDate(date)
d.startsWith("just now") -> Calendar.getInstance().apply {
set(Calendar.HOUR_OF_DAY, 0)
set(Calendar.MINUTE, 0)
set(Calendar.SECOND, 0)
set(Calendar.MILLISECOND, 0)
}.timeInMillis
else -> dateFormat.tryParse(date)
WordSet(" ago").endsWith(d) -> {
parseRelativeDate(d)
}
WordSet("just now").startsWith(d) -> {
Calendar.getInstance().apply {
set(Calendar.HOUR_OF_DAY, 0)
set(Calendar.MINUTE, 0)
set(Calendar.SECOND, 0)
set(Calendar.MILLISECOND, 0)
}.timeInMillis
}
else -> dateFormat.parseSafe(date)
}
}
@ -226,18 +260,24 @@ internal class MangaPark(context: MangaLoaderContext) :
val number = Regex("""(\d+)""").find(date)?.value?.toIntOrNull() ?: return 0
val cal = Calendar.getInstance()
return when {
WordSet("second").anyWordIn(date) -> cal.apply { add(Calendar.SECOND, -number) }.timeInMillis
WordSet("minute", "minutes", "mins", "min").anyWordIn(date) -> cal.apply {
add(
Calendar.MINUTE,
-number,
)
}.timeInMillis
WordSet("second")
.anyWordIn(date) -> cal.apply { add(Calendar.SECOND, -number) }.timeInMillis
WordSet("minute", "minutes", "mins", "min")
.anyWordIn(date) -> cal.apply { add(Calendar.MINUTE, -number) }.timeInMillis
WordSet("hour", "hours")
.anyWordIn(date) -> cal.apply { add(Calendar.HOUR, -number) }.timeInMillis
WordSet("day", "days")
.anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis
WordSet("month", "months")
.anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
WordSet("year")
.anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
WordSet("hour", "hours").anyWordIn(date) -> cal.apply { add(Calendar.HOUR, -number) }.timeInMillis
WordSet("day", "days").anyWordIn(date) -> cal.apply { add(Calendar.DAY_OF_MONTH, -number) }.timeInMillis
WordSet("month", "months").anyWordIn(date) -> cal.apply { add(Calendar.MONTH, -number) }.timeInMillis
WordSet("year").anyWordIn(date) -> cal.apply { add(Calendar.YEAR, -number) }.timeInMillis
else -> 0
}
}
@ -245,24 +285,32 @@ internal class MangaPark(context: MangaLoaderContext) :
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()
val script = if (doc.selectFirst("script:containsData(comic-)") != null) {
doc.selectFirstOrThrow("script:containsData(comic-)").data()
.substringAfterLast("\"comic-").split("\",\"")
val id = chapter.url.removeSuffix('/').substringAfterLast('/').substringBefore('-')
val s = doc.selectFirstOrThrow("script:containsData($id)").data()
val script = if (s.contains("\"comic-")) {
s.substringAfterLast("\"comic-")
} else {
doc.selectFirstOrThrow("script:containsData(manga-)").data()
.substringAfterLast("\"manga-").split("\",\"")
s.substringAfterLast("\"manga-")
}
return script.mapNotNull { url ->
if (!url.startsWith("https://")) {
return@mapNotNull null
} else {
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
return Regex("\"(https?:.+?)\"")
.findAll(script)
.mapIndexedNotNullTo(ArrayList()) { i, it ->
val url = it.groupValues.getOrNull(1) ?: return@mapIndexedNotNullTo null
if (url.contains(".jpg") || url.contains(".jpeg") || url.contains(".jfif") || url.contains(".pjpeg") ||
url.contains(".pjp") || url.contains(".png") || url.contains(".webp") || url.contains(".avif") ||
url.contains(".gif")
) {
MangaPage(
id = generateUid(url),
url = url,
preview = null,
source = source,
)
} else {
return@mapIndexedNotNullTo null
}
}
}
}
}

@ -9,53 +9,58 @@ import okhttp3.ResponseBody.Companion.toResponseBody
import org.json.JSONArray
import org.json.JSONObject
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaParser
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.SinglePageMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import org.koitharu.kotatsu.parsers.util.json.asTypedList
import org.koitharu.kotatsu.parsers.util.json.getStringOrNull
import org.koitharu.kotatsu.parsers.util.json.mapJSON
import org.koitharu.kotatsu.parsers.util.json.mapJSONNotNull
import org.koitharu.kotatsu.parsers.util.json.toJSONList
import org.koitharu.kotatsu.parsers.util.suspendlazy.suspendLazy
import java.util.*
internal abstract class MangaPlusParser(
context: MangaLoaderContext,
source: MangaSource,
source: MangaParserSource,
private val sourceLang: String,
) : MangaParser(context, source), Interceptor {
) : SinglePageMangaParser(context, source), Interceptor {
private val apiUrl = "https://jumpg-webapi.tokyo-cdn.com/api"
override val configKeyDomain = ConfigKey.Domain("mangaplus.shueisha.co.jp")
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.POPULARITY,
SortOrder.UPDATED,
SortOrder.ALPHABETICAL,
)
private val extraHeaders = Headers.headersOf("Session-Token", UUID.randomUUID().toString())
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isSearchSupported = true,
)
// no tags or tag search available
override suspend fun getAvailableTags(): Set<MangaTag> = emptySet()
override suspend fun getFilterOptions() = MangaListFilterOptions()
override suspend fun getList(offset: Int, filter: MangaListFilter?): List<Manga> {
if (offset > 0) {
return emptyList()
}
private val extraHeaders = Headers.headersOf("Session-Token", UUID.randomUUID().toString())
return when (filter) {
is MangaListFilter.Advanced -> {
when (filter.sortOrder) {
override suspend fun getList(order: SortOrder, filter: MangaListFilter): List<Manga> {
return when {
filter.query.isNullOrEmpty() -> {
when (order) {
SortOrder.POPULARITY -> getPopularList()
SortOrder.UPDATED -> getLatestList()
else -> getAllTitleList()
}
}
is MangaListFilter.Search -> getAllTitleList(filter.query)
else -> getAllTitleList()
else -> getAllTitleList(filter.query)
}
}
@ -64,7 +69,7 @@ internal abstract class MangaPlusParser(
return json.getJSONObject("titleRankingView")
.getJSONArray("titles")
.toJSONList()
.asTypedList<JSONObject>()
.toMangaList()
}
@ -78,11 +83,11 @@ internal abstract class MangaPlusParser(
}
// since search is local, save network calls on related manga call
private val allTitleCache = SuspendLazy {
private val allTitleCache = suspendLazy {
apiCall("/title_list/allV2")
.getJSONObject("allTitlesViewV2")
.getJSONArray("AllTitlesGroup")
.mapJSON { it.getJSONArray("titles").toJSONList() }
.mapJSON { it.getJSONArray("titles").asTypedList<JSONObject>() }
.flatten()
}
@ -116,9 +121,9 @@ internal abstract class MangaPlusParser(
publicUrl = "/titles/$titleId".toAbsoluteUrl(domain),
title = name,
coverUrl = it.getString("portraitImageUrl"),
altTitle = null,
author = author,
isNsfw = false,
altTitles = emptySet(),
authors = setOf(author),
contentRating = null,
rating = RATING_UNKNOWN,
state = null,
source = source,
@ -138,13 +143,14 @@ internal abstract class MangaPlusParser(
}
val hiatus = json.getStringOrNull("nonAppearanceInfo")?.contains("on a hiatus") == true
val author = title.getString("author")
.split("/").joinToString(transform = String::trim)
return manga.copy(
title = title.getString("name"),
publicUrl = "/titles/${title.getInt("titleId")}".toAbsoluteUrl(domain),
coverUrl = title.getString("portraitImageUrl"),
author = title.getString("author")
.split("/").joinToString(transform = String::trim),
authors = setOf(author),
description = buildString {
json.getString("overview").let(::append)
json.getStringOrNull("viewingPeriodDescription")
@ -165,10 +171,10 @@ internal abstract class MangaPlusParser(
private fun parseChapters(chapterListGroup: JSONArray, language: String): List<MangaChapter> {
val chapterList = chapterListGroup
.toJSONList()
.asTypedList<JSONObject>()
.flatMap {
it.optJSONArray("firstChapterList")?.toJSONList().orEmpty() +
it.optJSONArray("lastChapterList")?.toJSONList().orEmpty()
it.optJSONArray("firstChapterList")?.asTypedList<JSONObject>().orEmpty() +
it.optJSONArray("lastChapterList")?.asTypedList<JSONObject>().orEmpty()
}
return chapterList.mapChapters { _, chapter ->
@ -178,10 +184,11 @@ internal abstract class MangaPlusParser(
MangaChapter(
id = generateUid(chapterId),
url = chapterId,
name = subtitle,
title = subtitle,
number = chapter.getString("name")
.substringAfter("#")
.toIntOrNull() ?: -1,
.toFloatOrNull() ?: -1f,
volume = 0,
uploadDate = chapter.getInt("startTimeStamp") * 1000L,
branch = when (language) {
"PORTUGUESE_BR" -> "Portuguese (Brazil)"
@ -222,14 +229,11 @@ internal abstract class MangaPlusParser(
return response
}
val contentType = response.headers["Content-Type"] ?: "image/jpeg"
val image = requireNotNull(response.body).bytes().decodeXorCipher(encryptionKey)
val body = image.toResponseBody(contentType.toMediaTypeOrNull())
return response.newBuilder()
.body(body)
.build()
return response.map { responseBody ->
val contentType = response.headers["Content-Type"] ?: "image/jpeg"
val image = responseBody.bytes().decodeXorCipher(encryptionKey)
image.toResponseBody(contentType.toMediaTypeOrNull())
}
}
private fun ByteArray.decodeXorCipher(key: String): ByteArray {
@ -252,7 +256,7 @@ internal abstract class MangaPlusParser(
return checkNotNull(success) {
val error = response.getJSONObject("error")
val reason = error.getJSONArray("popups")
.toJSONList()
.asTypedList<JSONObject>()
.firstOrNull { it.getStringOrNull("language") == null }
if (reason?.getStringOrNull("subject") == "Not Found" && url.contains("manga_viewer")) {
@ -266,56 +270,63 @@ internal abstract class MangaPlusParser(
@MangaSourceParser("MANGAPLUSPARSER_EN", "MANGA Plus English", "en")
class English(context: MangaLoaderContext) : MangaPlusParser(
context,
MangaSource.MANGAPLUSPARSER_EN,
MangaParserSource.MANGAPLUSPARSER_EN,
"ENGLISH",
)
@MangaSourceParser("MANGAPLUSPARSER_ES", "MANGA Plus Spanish", "es")
class Spanish(context: MangaLoaderContext) : MangaPlusParser(
context,
MangaSource.MANGAPLUSPARSER_ES,
MangaParserSource.MANGAPLUSPARSER_ES,
"SPANISH",
)
@MangaSourceParser("MANGAPLUSPARSER_FR", "MANGA Plus French", "fr")
class French(context: MangaLoaderContext) : MangaPlusParser(
context,
MangaSource.MANGAPLUSPARSER_FR,
MangaParserSource.MANGAPLUSPARSER_FR,
"FRENCH",
)
@MangaSourceParser("MANGAPLUSPARSER_ID", "MANGA Plus Indonesian", "id")
class Indonesian(context: MangaLoaderContext) : MangaPlusParser(
context,
MangaSource.MANGAPLUSPARSER_ID,
MangaParserSource.MANGAPLUSPARSER_ID,
"INDONESIAN",
)
@MangaSourceParser("MANGAPLUSPARSER_PTBR", "MANGA Plus Portuguese (Brazil)", "pt")
class Portuguese(context: MangaLoaderContext) : MangaPlusParser(
context,
MangaSource.MANGAPLUSPARSER_PTBR,
MangaParserSource.MANGAPLUSPARSER_PTBR,
"PORTUGUESE_BR",
)
@MangaSourceParser("MANGAPLUSPARSER_RU", "MANGA Plus Russian", "ru")
class Russian(context: MangaLoaderContext) : MangaPlusParser(
context,
MangaSource.MANGAPLUSPARSER_RU,
MangaParserSource.MANGAPLUSPARSER_RU,
"RUSSIAN",
)
@MangaSourceParser("MANGAPLUSPARSER_TH", "MANGA Plus Thai", "th")
class Thai(context: MangaLoaderContext) : MangaPlusParser(
context,
MangaSource.MANGAPLUSPARSER_TH,
MangaParserSource.MANGAPLUSPARSER_TH,
"THAI",
)
@MangaSourceParser("MANGAPLUSPARSER_VI", "MANGA Plus Vietnamese", "vi")
class Vietnamese(context: MangaLoaderContext) : MangaPlusParser(
context,
MangaSource.MANGAPLUSPARSER_VI,
MangaParserSource.MANGAPLUSPARSER_VI,
"VIETNAMESE",
)
@MangaSourceParser("MANGAPLUSPARSER_DE", "MANGA Plus German", "de")
class German(context: MangaLoaderContext) : MangaPlusParser(
context,
MangaParserSource.MANGAPLUSPARSER_DE,
"GERMAN",
)
}

@ -0,0 +1,390 @@
package org.koitharu.kotatsu.parsers.site.all
import androidx.collection.MutableIntObjectMap
import okhttp3.HttpUrl.Companion.toHttpUrl
import okhttp3.Interceptor
import okhttp3.Response
import org.jsoup.Jsoup
import org.jsoup.nodes.Document
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaParserAuthProvider
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.bitmap.Bitmap
import org.koitharu.kotatsu.parsers.bitmap.Rect
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import org.koitharu.kotatsu.parsers.util.suspendlazy.suspendLazy
import java.util.*
import javax.crypto.Cipher
import javax.crypto.spec.SecretKeySpec
import kotlin.math.min
@MangaSourceParser("MANGAREADERTO", "MangaReader.To")
internal class MangaReaderToParser(context: MangaLoaderContext) :
PagedMangaParser(context, MangaParserSource.MANGAREADERTO, 16),
Interceptor, MangaParserAuthProvider {
override val configKeyDomain = ConfigKey.Domain("mangareader.to")
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override val authUrl: String
get() = "https://${domain}/home"
override suspend fun isAuthorized(): Boolean {
return context.cookieJar.getCookies(domain).any {
it.name.contains("connect.sid")
}
}
// It will be easier to connect to a manga page, as the source redirects to a lot of advertising.
override suspend fun getUsername(): String {
val body = webClient.httpGet("https://${domain}/user/profile").parseHtml().body()
return body.getElementById("pro5-name")?.attr("value") ?: body.parseFailed("Cannot find username")
}
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.POPULARITY,
SortOrder.RATING,
SortOrder.UPDATED,
SortOrder.NEWEST,
SortOrder.ALPHABETICAL,
)
val tags = suspendLazy(soft = true) {
val document = webClient.httpGet("https://$domain/filter").parseHtml()
document.select("div.f-genre-item").map {
MangaTag(
title = it.ownText().toTitleCase(sourceLocale),
key = it.attr("data-id"),
source = source,
)
}.associateBy { it.title }
}
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isSearchSupported = true,
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = tags.get().values.toSet(),
availableStates = EnumSet.of(
MangaState.ONGOING,
MangaState.FINISHED,
MangaState.ABANDONED,
MangaState.PAUSED,
MangaState.UPCOMING,
),
)
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val url = "https://$domain".toHttpUrl().newBuilder().apply {
when {
!filter.query.isNullOrEmpty() -> {
addPathSegment("search")
addQueryParameter("keyword", filter.query)
addQueryParameter("page", page.toString())
}
else -> {
addPathSegment("filter")
addQueryParameter("page", page.toString())
addQueryParameter(
name = "sort",
value = when (order) {
SortOrder.POPULARITY -> "most-viewed"
SortOrder.RATING -> "score"
SortOrder.UPDATED -> "latest-updated"
SortOrder.NEWEST -> "release-date"
SortOrder.ALPHABETICAL -> "name-az"
else -> "default"
},
)
addQueryParameter("genres", filter.tags.joinToString(",") { it.key })
addQueryParameter(
name = "status",
value = when (val state = filter.states.oneOrThrowIfMany()) {
MangaState.ONGOING -> "2"
MangaState.FINISHED -> "1"
MangaState.ABANDONED -> "4"
MangaState.PAUSED -> "3"
MangaState.UPCOMING -> "5"
null -> ""
else -> throw IllegalArgumentException("$state not supported")
},
)
}
}
}.build()
val document = webClient.httpGet(url).parseHtml()
return document.select(".manga_list-sbs .manga-poster").map {
val mangaUrl = it.attrAsRelativeUrl("href")
val thumb = it.select("img")
Manga(
id = generateUid(mangaUrl),
url = mangaUrl,
publicUrl = mangaUrl.toAbsoluteUrl(domain),
title = thumb.attr("alt"),
coverUrl = thumb.attr("src"),
source = source,
altTitles = emptySet(),
authors = emptySet(),
contentRating = null,
rating = RATING_UNKNOWN,
state = null,
tags = emptySet(),
)
}
}
override suspend fun getRelatedManga(seed: Manga): List<Manga> {
val document = webClient.httpGet(seed.url.toAbsoluteUrl(domain)).parseHtml()
return document.select(".block_area_authors-other .manga_list-sbs .manga-poster, .featured-block-ul .manga-poster")
.map {
val mangaUrl = it.attrAsRelativeUrl("href")
val thumb = it.selectFirstOrThrow("img")
Manga(
id = generateUid(mangaUrl),
url = mangaUrl,
publicUrl = mangaUrl.toAbsoluteUrl(domain),
title = thumb.attr("alt"),
coverUrl = thumb.attrAsAbsoluteUrlOrNull("src"),
source = source,
altTitles = emptySet(),
authors = emptySet(),
contentRating = null,
rating = RATING_UNKNOWN,
state = null,
tags = emptySet(),
)
}
}
override suspend fun getDetails(manga: Manga): Manga {
val document = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
val availableTags = tags.get()
var isAdult = false
var isSuggestive = false
val author = document.select("div.anisc-info a[href*=/author/]")
.joinToString { it.ownText().replace(", ", " ") }.nullIfEmpty()
return manga.copy(
title = document.selectFirst("h2.manga-name")!!.ownText(),
altTitles = setOfNotNull(document.selectFirst("div.manga-name-or")?.ownTextOrNull()),
rating = document.selectFirst("div.anisc-info .item:contains(score:) > .name")
?.text()?.toFloatOrNull()?.div(10) ?: RATING_UNKNOWN,
coverUrl = document.selectFirst(".manga-poster > img")?.attrAsAbsoluteUrlOrNull("src"),
tags = document.select("div.genres > a[href*=/genre/]").mapNotNullToSet {
val tag = it.ownText()
if (tag == "Hentai") {
isAdult = true
} else if (tag == "Ecchi") {
isSuggestive = true
}
availableTags[tag]
},
contentRating = when {
isAdult -> ContentRating.ADULT
isSuggestive -> ContentRating.SUGGESTIVE
else -> ContentRating.SAFE
},
state = document.selectFirst("div.anisc-info .item:contains(status:) > .name")
?.text()?.let {
when (it) {
"Publishing" -> MangaState.ONGOING
"Finished" -> MangaState.FINISHED
"On Hiatus" -> MangaState.PAUSED
"Discontinued" -> MangaState.ABANDONED
"Not yet published" -> MangaState.UPCOMING
else -> null
}
},
authors = setOfNotNull(author),
description = document.select("div.description").html(),
chapters = parseChapters(document),
source = source,
)
}
private fun parseChapters(document: Document): List<MangaChapter> {
val total =
document.select(".chapters-list-ul > ul > li.chapter-item, .volume-list-ul div.lang-volumes > div.item").size
val chapters = ChaptersListBuilder(total)
document.select(".chapters-list-ul > ul").forEach { ul ->
ul.select("li.chapter-item").reversed().forEach { li ->
val a = li.selectFirst("a")!!
chapters.add(
MangaChapter(
id = generateUid(a.attrAsRelativeUrl("href")),
title = a.attrOrNull("title"),
number = li.attr("data-number").toFloat(),
volume = 0,
url = a.attrAsRelativeUrl("href"),
scanlator = null,
uploadDate = 0L,
branch = createBranchName(ul.id().substringBefore("-chapters"), "Chapters"),
source = source,
),
)
}
}
val numRegex = Regex("""(\d+)""")
document.select(".volume-list-ul div.lang-volumes").forEach { div ->
div.select("div.item > div.manga-poster").reversed().forEach { vol ->
val url = vol.selectFirst("a")!!.attrAsRelativeUrl("href")
val name = vol.selectFirst("span")!!.ownText()
chapters.add(
MangaChapter(
id = generateUid(url),
title = name,
number = numRegex.find(name)?.groupValues?.getOrNull(1)?.toFloatOrNull() ?: 0f,
volume = 0,
url = url,
scanlator = null,
uploadDate = 0L,
branch = createBranchName(div.id().substringBefore("-volumes"), "Volumes"),
source = source,
),
)
}
}
return chapters.toList()
}
private fun createBranchName(lang: String, type: String): String {
val langCode = lang.substringBefore("-")
return Locale(langCode).displayLanguage + " " + type
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val typeAndId = webClient.httpGet(chapter.url.toAbsoluteUrl(domain))
.parseHtml()
.selectFirst("#wrapper")!!.run {
"${attr("data-reading-by")}/${attr("data-reading-id")}"
}
val document = webClient.httpGet("https://$domain/ajax/image/list/$typeAndId?quality=high")
.parseJson()
.getString("html")
.let(Jsoup::parse)
return document.select(".iv-card").map {
val url = it.attr("data-url")
MangaPage(
id = generateUid(url),
url = if (it.hasClass("shuffled")) {
"$url#scrambled"
} else {
url
},
preview = null,
source = source,
)
}
}
override fun intercept(chain: Interceptor.Chain): Response {
val request = chain.request()
val response = chain.proceed(request)
if (request.url.fragment != "scrambled") return response
return context.redrawImageResponse(response, ::descramble)
}
// Cache of shuffle permutations keyed by group size: the seeded shuffle
// depends only on the number of equally-sized pieces, so each permutation is
// computed once and reused. Guarded by synchronized(memo) in descramble().
private val memo = MutableIntObjectMap<IntArray>()

// Reassembles a scrambled page image. The image is cut into PIECE_SIZE
// squares (edge tiles are smaller); pieces of identical dimensions were
// shuffled with a seeded PRNG, so re-deriving the same permutation tells us
// which source tile belongs in each destination slot.
private fun descramble(bitmap: Bitmap): Bitmap = synchronized(memo) {
    val width = bitmap.width
    val height = bitmap.height
    val result = context.createBitmap(width, height)
    val pieces = ArrayList<Piece>()
    // Enumerate tiles row-major; w/h shrink at the right/bottom edges.
    for (y in 0 until height step PIECE_SIZE) {
        for (x in 0 until width step PIECE_SIZE) {
            val w = min(PIECE_SIZE, width - x)
            val h = min(PIECE_SIZE, height - y)
            pieces.add(Piece(x, y, w, h))
        }
    }
    // Only tiles of identical size are interchangeable; pack (w, h) into one
    // int as the grouping key.
    val groups = pieces.groupBy { it.w shl 16 or it.h }
    for (group in groups.values) {
        val size = group.size
        val permutation = memo.getOrPut(size) {
            val random = SeedRandom("staystay")
            // https://github.com/webcaetano/shuffle-seed
            val indices = (0 until size).toMutableList()
            IntArray(size) { indices.removeAt((random.nextDouble() * indices.size).toInt()) }
        }
        // permutation[i] = original slot of the piece currently at index i.
        for ((i, original) in permutation.withIndex()) {
            val src = group[i]
            val dst = group[original]
            val srcRect = Rect(src.x, src.y, src.x + src.w, src.y + src.h)
            val dstRect = Rect(dst.x, dst.y, dst.x + dst.w, dst.y + dst.h)
            result.drawBitmap(bitmap, srcRect, dstRect)
        }
    }
    return result
}
// Axis-aligned tile of the scrambled image: top-left corner (x, y) and size
// w×h in pixels.
private class Piece(val x: Int, val y: Int, val w: Int, val h: Int)
// Port of the ARC4-based "seedrandom" PRNG used by the site's scrambler:
// https://github.com/davidbau/seedrandom
// Must stay bit-compatible with the JS original, or descrambling breaks.
private class SeedRandom(key: String) {

    private val input = ByteArray(RC4_WIDTH)
    private val buffer = ByteArray(RC4_WIDTH)
    // Read position in buffer; starts exhausted so the first nextByte()
    // refills.
    private var pos = RC4_WIDTH

    // RC4 keystream seeded with the key; the first RC4_WIDTH bytes are
    // discarded (RC4-drop[256]) to skip the biased initial keystream.
    private val rc4 = Cipher.getInstance("RC4").apply {
        init(Cipher.ENCRYPT_MODE, SecretKeySpec(key.toByteArray(), "RC4"))
        update(input, 0, RC4_WIDTH, buffer) // RC4-drop[256]
    }

    // Returns a double in [0, 1), mirroring seedrandom's prng(): accumulate
    // keystream bytes until at least 52 mantissa bits are gathered, trim to
    // below 2^53, then scale by 2^-exp into the unit interval.
    fun nextDouble(): Double {
        var num = nextByte()
        var exp = 8
        while (num < 1L shl 52) {
            num = num shl 8 or nextByte()
            exp += 8
        }
        while (num >= 1L shl 53) {
            num = num ushr 1
            exp--
        }
        return Math.scalb(num.toDouble(), -exp)
    }

    // Next keystream byte as an unsigned value (0..255); refills the buffer
    // one RC4_WIDTH chunk at a time.
    private fun nextByte(): Long {
        if (pos == RC4_WIDTH) {
            rc4.update(input, 0, RC4_WIDTH, buffer)
            pos = 0
        }
        return buffer[pos++].toLong() and 0xFF
    }
}
}
// RC4 state/keystream chunk size in bytes.
private const val RC4_WIDTH = 256
// Edge length (px) of the square tiles used by the image scrambler.
private const val PIECE_SIZE = 200

@ -0,0 +1,228 @@
package org.koitharu.kotatsu.parsers.site.all
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import java.util.*
@MangaSourceParser("MANHWA210", "Manhwa210", type = ContentType.MANHWA)
internal class Manhwa210(context: MangaLoaderContext) : PagedMangaParser(context, MangaParserSource.MANHWA210, 60) {

    override val configKeyDomain = ConfigKey.Domain("manhwa210.com")

    override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
        super.onCreateConfig(keys)
        keys.add(userAgentKey)
    }

    override val availableSortOrders: Set<SortOrder> = EnumSet.of(
        SortOrder.ALPHABETICAL,
        SortOrder.ALPHABETICAL_DESC,
        SortOrder.UPDATED,
        SortOrder.NEWEST,
        SortOrder.POPULARITY,
    )

    override val filterCapabilities: MangaListFilterCapabilities
        get() = MangaListFilterCapabilities(
            isSearchSupported = true,
        )

    override suspend fun getFilterOptions() = MangaListFilterOptions(
        availableTags = availableTags(),
        availableStates = EnumSet.of(MangaState.ONGOING, MangaState.FINISHED),
    )

    /** Maps a [SortOrder] to the site's `sort` query-parameter value. */
    private fun sortQueryValue(order: SortOrder): String = when (order) {
        SortOrder.POPULARITY -> "-views"
        SortOrder.UPDATED -> "-updated_at"
        SortOrder.NEWEST -> "-created_at"
        SortOrder.ALPHABETICAL -> "name"
        SortOrder.ALPHABETICAL_DESC -> "-name"
        else -> "-updated_at"
    }

    /**
     * Builds the catalog URL (search / genre / plain list) and parses the
     * resulting grid into [Manga] stubs.
     *
     * Fix: the plain-list branch previously appended `&sort=` inside the
     * `when` AND again in the shared block below it, duplicating the
     * parameter; the sort value is now appended exactly once per URL.
     */
    override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
        val url = buildString {
            append("https://")
            append(domain)
            when {
                !filter.query.isNullOrEmpty() -> {
                    append("/search")
                    append("?filter[name]=")
                    append(filter.query.urlEncoded())
                    if (page > 1) {
                        append("&page=")
                        append(page)
                    }
                    append("&sort=")
                    append(sortQueryValue(order))
                }
                filter.tags.isNotEmpty() -> {
                    val tag = filter.tags.first()
                    append("/genre/")
                    append(tag.key)
                    append("?page=")
                    append(page)
                }
                else -> {
                    append("/list")
                    append("?page=")
                    append(page)
                }
            }
            // The genre and plain-list endpoints take the sort parameter
            // here; the search branch already appended its own.
            if (filter.query.isNullOrEmpty()) {
                append("&sort=")
                append(sortQueryValue(order))
            }
            if (filter.states.isNotEmpty()) {
                append("&filter[status]=")
                // Site status codes: 2 = ongoing, 1 = finished. Trailing
                // commas are preserved as the server tolerates them.
                filter.states.forEach {
                    append(
                        when (it) {
                            MangaState.ONGOING -> "2,"
                            MangaState.FINISHED -> "1,"
                            else -> "1,2"
                        },
                    )
                }
            }
        }
        val doc = webClient.httpGet(url).parseHtml()
        return doc.select("div.grid div.relative").map { div ->
            val href = div.selectFirst("a[href^=/manga/]")?.attrOrNull("href")
                ?: div.parseFailed("Can't find manga link!")
            // Cover is a CSS background-image: url('...') on the card.
            val coverUrl = div.selectFirst("div.cover")?.attr("style")
                ?.substringAfter("url('")?.substringBefore("')")
            Manga(
                id = generateUid(href),
                title = div.select("div.p-2 a.text-ellipsis").text(),
                altTitles = emptySet(),
                url = href,
                publicUrl = href.toAbsoluteUrl(domain),
                rating = RATING_UNKNOWN,
                contentRating = ContentRating.ADULT,
                coverUrl = coverUrl.orEmpty(),
                tags = setOf(),
                state = null,
                authors = emptySet(),
                source = source,
            )
        }
    }

    /**
     * Fills in details (alt title, status, genres, author, description) and
     * the chapter list from the title page.
     */
    override suspend fun getDetails(manga: Manga): Manga {
        val root = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
        val author = root.selectFirst("div.mt-2:contains(Artist) span a")?.textOrNull()
        return manga.copy(
            altTitles = setOfNotNull(root.selectLast("div.grow div:contains(Alt name) span")?.textOrNull()),
            state = when (root.selectFirst("div.mt-2:contains(Status) span.text-blue-500")?.text()) {
                "Ongoing" -> MangaState.ONGOING
                "Completed" -> MangaState.FINISHED
                else -> null
            },
            tags = root.select("div.mt-2:contains(Genres) a.bg-gray-500").mapToSet { a ->
                MangaTag(
                    key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
                    title = a.text(),
                    source = source,
                )
            },
            authors = setOfNotNull(author),
            description = root.selectFirst("meta[name=description]")?.attrOrNull("content"),
            // Chapters are listed newest-first on the site; reversed so that
            // index i becomes the ascending chapter number.
            chapters = root.select("div.justify-between ul.overflow-y-auto.overflow-x-hidden a")
                .mapChapters(reversed = true) { i, a ->
                    val href = a.attrAsRelativeUrl("href")
                    val name = a.selectFirst("span.text-ellipsis")?.text().orEmpty()
                    val dateText = a.parent()?.selectFirst("span.timeago")?.attr("datetime").orEmpty()
                    MangaChapter(
                        id = generateUid(href),
                        title = name,
                        number = i.toFloat(),
                        volume = 0,
                        url = href,
                        scanlator = null,
                        uploadDate = parseDateTime(dateText),
                        branch = null,
                        source = source,
                    )
                },
        )
    }

    /** Reader pages are plain lazy-loaded <img> elements. */
    override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
        val fullUrl = chapter.url.toAbsoluteUrl(domain)
        val doc = webClient.httpGet(fullUrl).parseHtml()
        return doc.select("div.text-center img.lazy").mapNotNull { img ->
            val url = img.requireSrc()
            MangaPage(
                id = generateUid(url),
                url = url,
                preview = null,
                source = source,
            )
        }
    }

    /**
     * Parses a "yyyy-MM-dd HH:mm:ss" timestamp into epoch millis, or 0 on any
     * malformed input.
     *
     * Fix: Calendar.set() does not touch the MILLISECOND field, so it is
     * zeroed explicitly — otherwise the result varied by up to 999 ms
     * depending on when parsing ran.
     */
    private fun parseDateTime(dateStr: String): Long = runCatching {
        val parts = dateStr.split(' ')
        val dateParts = parts[0].split('-')
        val timeParts = parts[1].split(':')
        val calendar = Calendar.getInstance()
        calendar.set(
            dateParts[0].toInt(),
            dateParts[1].toInt() - 1,
            dateParts[2].toInt(),
            timeParts[0].toInt(),
            timeParts[1].toInt(),
            timeParts[2].toInt(),
        )
        calendar.set(Calendar.MILLISECOND, 0)
        calendar.timeInMillis
    }.getOrDefault(0L)

    /** Scrapes the genre list from the home page's tag grid. */
    private suspend fun availableTags(): Set<MangaTag> {
        val doc = webClient.httpGet("https://$domain").parseHtml()
        return doc.select("ul.grid.grid-cols-2 a").mapToSet { a ->
            MangaTag(
                key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
                title = a.text(),
                source = source,
            )
        }
    }
}

@ -0,0 +1,148 @@
package org.koitharu.kotatsu.parsers.site.all
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import java.util.*
@MangaSourceParser("MISSKON", "MissKon", type = ContentType.OTHER)
internal class Misskon(context: MangaLoaderContext) : PagedMangaParser(context, MangaParserSource.MISSKON, 24) {

    override val configKeyDomain = ConfigKey.Domain("misskon.com")

    override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
        super.onCreateConfig(keys)
        keys.add(userAgentKey)
    }

    override val availableSortOrders: Set<SortOrder> = EnumSet.of(
        SortOrder.UPDATED,
        SortOrder.POPULARITY,
    )

    override val filterCapabilities: MangaListFilterCapabilities
        get() = MangaListFilterCapabilities(isSearchSupported = true)

    override suspend fun getFilterOptions() = MangaListFilterOptions()

    /**
     * Lists galleries. Search uses `/page/N/?s=query`; POPULARITY maps to the
     * site's `/top3/` chart (a single page); otherwise plain `/page/N`
     * browsing sorted by recency.
     */
    override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
        val url = buildString {
            append("https://")
            append(domain)
            when {
                !filter.query.isNullOrEmpty() -> {
                    append("/page/$page/")
                    append("?s=")
                    append(filter.query.urlEncoded())
                }
                order == SortOrder.POPULARITY -> {
                    append("/top3/")
                }
                else -> {
                    append("/page/$page")
                }
            }
        }
        val doc = webClient.httpGet(url).parseHtml()
        return doc.select("article.item-list").map { article ->
            val titleEl = article.selectFirst(".post-box-title")!!
            val href = titleEl.selectFirst("a")?.attrAsRelativeUrl("href")
                ?: article.parseFailed("Cannot find manga link")
            Manga(
                id = generateUid(href),
                title = titleEl.text(),
                altTitles = emptySet(),
                url = href,
                publicUrl = href.toAbsoluteUrl(domain),
                rating = RATING_UNKNOWN,
                contentRating = ContentRating.ADULT,
                coverUrl = article.selectFirst(".post-thumbnail img")?.absUrl("data-src").orEmpty(),
                tags = setOf(),
                state = null,
                authors = emptySet(),
                source = source,
            )
        }
    }

    /**
     * Fills tags from the post's tag cloud; every gallery is exposed as a
     * single synthetic chapter whose pages are gathered in [getPages].
     */
    override suspend fun getDetails(manga: Manga): Manga {
        val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
        val postInnerEl = doc.selectFirst("article > .post-inner")!!
        return manga.copy(
            tags = postInnerEl.select(".post-tag > a").mapToSet { a ->
                MangaTag(
                    key = a.text().lowercase(),
                    title = a.text(),
                    source = source,
                )
            },
            chapters = listOf(
                MangaChapter(
                    id = manga.id,
                    title = "Oneshot", // 1 album, idk
                    number = 1f,
                    volume = 0,
                    url = manga.url,
                    scanlator = null,
                    uploadDate = 0L,
                    branch = null,
                    source = source,
                ),
            ),
        )
    }

    /**
     * Collects images across the gallery's paginated parts. A gallery without
     * page-number links is a single page; otherwise each numbered part is
     * fetched relative to the canonical URL.
     */
    override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
        val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()
        val pageLinks = doc.select("div.post-inner div.page-link:nth-child(1) .post-page-numbers")
        if (pageLinks.isEmpty()) {
            // Single-page gallery.
            return extractPages(doc)
        }
        // Normalize the base so "$base/N/" is well-formed whether or not the
        // canonical URL (or the fallback chapter URL) has a trailing slash.
        val basePageUrl = (doc.selectFirst("link[rel=canonical]")?.absUrl("href")
            ?: chapter.url.toAbsoluteUrl(domain)).removeSuffix("/")
        val pages = ArrayList<MangaPage>()
        pageLinks.forEachIndexed { index, pageEl ->
            // Part 1 is the document already loaded; later parts are fetched.
            val pageDoc = if (index == 0) doc else webClient.httpGet("$basePageUrl/${pageEl.text()}/").parseHtml()
            pages += extractPages(pageDoc)
        }
        return pages
    }

    /**
     * Extracts image pages from one gallery page.
     *
     * Fix: Jsoup's absUrl() returns an empty string — never null — for a
     * missing attribute, so the previous `mapNotNull` filtered nothing and
     * blank URLs could become pages; blanks are now dropped explicitly.
     */
    private fun extractPages(doc: org.jsoup.nodes.Document): List<MangaPage> =
        doc.select("div.post-inner > div.entry > p > img")
            .map { img -> img.absUrl("data-src") }
            .filter { it.isNotEmpty() }
            .map { url ->
                MangaPage(
                    id = generateUid(url),
                    url = url,
                    preview = null,
                    source = source,
                )
            }
}

@ -0,0 +1,229 @@
package org.koitharu.kotatsu.parsers.site.all
import okhttp3.Headers
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import java.util.*
@MangaSourceParser("MULTPORN", "Multporn")
internal class Multporn(context: MangaLoaderContext) :
    PagedMangaParser(context, MangaParserSource.MULTPORN, 42) {

    override val configKeyDomain = ConfigKey.Domain("multporn.net")

    // Pinned desktop Chrome UA and a form content type are sent on every
    // request. NOTE(review): presumably required by the site's filtering —
    // confirm before changing.
    override fun getRequestHeaders(): Headers = Headers.Builder()
        .add("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.122 Safari/537.36")
        .add("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")
        .build()

    override val availableSortOrders: Set<SortOrder> = EnumSet.of(
        SortOrder.NEWEST,
        SortOrder.NEWEST_ASC,
        SortOrder.UPDATED,
        SortOrder.UPDATED_ASC,
    )

    override val filterCapabilities: MangaListFilterCapabilities
        get() = MangaListFilterCapabilities(
            isSearchSupported = true,
        )

    init {
        // The site's pagination is zero-based.
        setFirstPage(0)
    }

    override suspend fun getFilterOptions() = MangaListFilterOptions(
        availableLocales = setOf(
            Locale("en"),
            Locale("de"),
            Locale("ru"),
            Locale("zh"),
            Locale("es"),
        ),
        availableContentTypes = EnumSet.of(
            ContentType.COMICS,
            ContentType.HENTAI,
        ),
    )

    // Builds one of three list URLs: full-text search, a category page, or
    // the /new listing with type/language/sort query parameters. Query
    // parameter names and order mirror the site's own forms.
    override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
        val url = buildString {
            append("https://")
            append(domain)
            when {
                !filter.query.isNullOrEmpty() -> {
                    append("/search?search_api_views_fulltext=")
                    // The search endpoint expects '+' between words, so each
                    // word is encoded separately and joined with '+'.
                    val encodedQuery = filter.query.splitByWhitespace().joinToString(separator = "+") { part ->
                        part.urlEncoded()
                    }
                    append(encodedQuery)
                    append("&undefined=Search")
                    append("&page=$page")
                }
                filter.tags.isNotEmpty() -> {
                    val tag = filter.tags.first()
                    append("/category/")
                    append(tag.key)
                    append("?sort_by=")
                    append(
                        when (order) {
                            SortOrder.NEWEST -> "created"
                            else -> "title" // default
                        }
                    )
                    // Category pages use a compound "0,N" page parameter.
                    append("&page=0,")
                    append(page)
                }
                else -> {
                    append("/new")
                    append("?type=")
                    // Site type ids: 1 = comics, 2 = hentai, "All" otherwise.
                    if (filter.types.isNotEmpty()) {
                        filter.types.oneOrThrowIfMany()?.let {
                            append(
                                when (it) {
                                    ContentType.COMICS -> "1"
                                    ContentType.HENTAI -> "2"
                                    else -> "All" // all
                                },
                            )
                        }
                    } else append("All")
                    // Site language ids; unmatched locales fall back to All.
                    filter.locale?.let {
                        append("&language=")
                        append(
                            when (it) {
                                Locale("en") -> "1"
                                Locale("de") -> "2"
                                Locale("ru") -> "3"
                                Locale("zh") -> "4"
                                Locale("es") -> "5"
                                else -> "All"
                            }
                        )
                    }
                    append("&field_user_discription_value=All")
                    append("&sort_by=")
                    append(
                        when (order) {
                            SortOrder.NEWEST -> "created&sort_order=DESC"
                            SortOrder.NEWEST_ASC -> "created&sort_order=ASC"
                            SortOrder.UPDATED -> "changed&sort_order=DESC"
                            SortOrder.UPDATED_ASC -> "changed&sort_order=ASC"
                            else -> "created&sort_order=DESC" // default
                        }
                    )
                    append("&undefined=Apply")
                    append("&page=$page")
                }
            }
        }
        val doc = webClient.httpGet(url).parseHtml()
        return doc.select(".masonry-item").map { div ->
            val href = div.selectFirstOrThrow(".views-field-title a").attrAsRelativeUrl("href")
            val coverUrl = div.selectFirstOrThrow(".views-field img").requireSrc()
            Manga(
                id = generateUid(href),
                title = div.select(".views-field-title").text(),
                altTitles = emptySet(),
                url = href,
                publicUrl = href.toAbsoluteUrl(domain),
                rating = RATING_UNKNOWN,
                contentRating = ContentRating.ADULT,
                coverUrl = coverUrl,
                tags = emptySet(),
                state = null,
                authors = emptySet(),
                source = source,
            )
        }
    }

    // Details page: authors come from a labelled "Author:" field plus several
    // unlabelled field classes; tags merge the Tags/Section/Characters link
    // groups; state is derived from membership in the "Ongoings" link list.
    // The whole gallery is exposed as one "Oneshot" chapter.
    override suspend fun getDetails(manga: Manga): Manga {
        val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
        val authors = (doc.select(".field:has(.field-label:contains(Author:)) .links a").map { it.text() } +
            parseUnlabelledAuthorNames(doc)).distinct()
        val tags = listOf("Tags", "Section", "Characters")
            .flatMap { type ->
                doc.select(".field:has(.field-label:contains($type:)) .links a").map { it.text() }
            }
            .distinct()
            .map { tag ->
                MangaTag(
                    title = tag,
                    key = tag.lowercase().replace(" ", "_"),
                    source = source,
                )
            }.toSet()
        val isOngoing = doc.select(".field .links a").any { it.text() == "Ongoings" }
        return manga.copy(
            authors = authors.toSet(),
            tags = tags,
            // Synthesized description: page count plus the Section and
            // Characters link texts.
            description = buildString {
                append("Pages: ")
                append(doc.select(".jb-image img").size)
                append("\n\n")
                doc.select(".field:has(.field-label:contains(Section:)) .links a").joinTo(this, prefix = "Section: ") { it.text() }
                doc.select(".field:has(.field-label:contains(Characters:)) .links a").joinTo(this, prefix = "\n\nCharacters: ") { it.text() }
            },
            state = if (isOngoing) MangaState.ONGOING else MangaState.FINISHED,
            chapters = listOf(
                MangaChapter(
                    id = generateUid(manga.url),
                    title = "Oneshot",
                    number = 1f,
                    volume = 0,
                    url = manga.url,
                    scanlator = null,
                    uploadDate = 0L,
                    branch = null,
                    source = source,
                )
            ),
        )
    }

    // Pages come from the Juicebox gallery; the thumbnail-style path segment
    // is stripped to get the full-size image, and the query string dropped.
    override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
        val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()
        return doc.select(".jb-image img").mapIndexed { i, img ->
            val url = img.attrAsAbsoluteUrl("src")
                .replace("/styles/juicebox_2k/public", "")
                .substringBefore("?")
            MangaPage(
                id = generateUid(url),
                url = url,
                preview = null,
                source = source,
            )
        }
    }

    // Collects author names from field blocks that carry no visible
    // "Author:" label but use one of these known field CSS classes.
    private fun parseUnlabelledAuthorNames(document: org.jsoup.nodes.Document): List<String> {
        val authorClasses = listOf(
            "field-name-field-author",
            "field-name-field-authors-gr",
            "field-name-field-img-group",
            "field-name-field-hentai-img-group",
            "field-name-field-rule-63-section"
        )
        return authorClasses.flatMap { className ->
            document.select(".$className a").map { it.text().trim() }
        }
    }
}

@ -0,0 +1,451 @@
package org.koitharu.kotatsu.parsers.site.all
import org.jsoup.nodes.Document
import org.jsoup.nodes.Element
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.model.ContentRating
import org.koitharu.kotatsu.parsers.model.ContentType
import org.koitharu.kotatsu.parsers.model.Manga
import org.koitharu.kotatsu.parsers.model.MangaChapter
import org.koitharu.kotatsu.parsers.model.MangaListFilter
import org.koitharu.kotatsu.parsers.model.MangaListFilterCapabilities
import org.koitharu.kotatsu.parsers.model.MangaListFilterOptions
import org.koitharu.kotatsu.parsers.model.MangaPage
import org.koitharu.kotatsu.parsers.model.MangaParserSource
import org.koitharu.kotatsu.parsers.model.MangaState
import org.koitharu.kotatsu.parsers.model.MangaTag
import org.koitharu.kotatsu.parsers.model.RATING_UNKNOWN
import org.koitharu.kotatsu.parsers.model.SortOrder
import org.koitharu.kotatsu.parsers.util.attrAsRelativeUrl
import org.koitharu.kotatsu.parsers.util.generateUid
import org.koitharu.kotatsu.parsers.util.mapToSet
import org.koitharu.kotatsu.parsers.util.parseHtml
import org.koitharu.kotatsu.parsers.util.toAbsoluteUrl
import org.koitharu.kotatsu.parsers.util.urlEncoded
import java.text.SimpleDateFormat
import java.util.EnumSet
import java.util.Locale
import java.util.regex.Pattern
@MangaSourceParser("MYREADINGMANGA", "MyReadingManga", type = ContentType.HENTAI)
internal class MyReadingManga(context: MangaLoaderContext) :
    PagedMangaParser(context, MangaParserSource.MYREADINGMANGA, 18) {

    override val configKeyDomain = ConfigKey.Domain("myreadingmanga.info")

    override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
        super.onCreateConfig(keys)
        keys.add(userAgentKey)
    }

    override val filterCapabilities: MangaListFilterCapabilities
        get() = MangaListFilterCapabilities(
            isSearchSupported = true,
            isOriginalLocaleSupported = true,
        )

    override val availableSortOrders: Set<SortOrder> = EnumSet.of(
        SortOrder.UPDATED,
    )

    // The locale list mirrors the /lang/<slug> archives handled by
    // getLanguageSlug() below.
    override suspend fun getFilterOptions() = MangaListFilterOptions(
        availableTags = fetchTags(),
        availableStates = EnumSet.of(
            MangaState.ONGOING,
            MangaState.FINISHED,
        ),
        availableContentRating = EnumSet.of(ContentRating.ADULT),
        availableLocales = setOf(
            Locale.ENGLISH,
            Locale.FRENCH,
            Locale.JAPANESE,
            Locale.CHINESE,
            Locale.GERMAN,
            Locale.ITALIAN,
            Locale.KOREAN,
            Locale.TRADITIONAL_CHINESE,
            Locale("es"), // Spanish
            Locale("pt"), // Portuguese
            Locale("ru"), // Russian
            Locale("tr"), // Turkish
            Locale("vi"), // Vietnamese
            Locale("ar"), // Arabic
            Locale("id"), // Indonesian (Bahasa)
            Locale("th"), // Thai
            Locale("pl"), // Polish
            Locale("sv"), // Swedish
            Locale("nl"), // Dutch (Flemish Dutch)
            Locale("hu"), // Hungarian
            Locale("hi"), // Hindi
            Locale("he"), // Hebrew
            Locale("el"), // Greek
            Locale("fi"), // Finnish
            Locale("fil"), // Filipino
            Locale("da"), // Danish
            Locale("cs"), // Czech
            Locale("hr"), // Croatian
            Locale("bg"), // Bulgarian
            Locale("zh", "HK"), // Cantonese
            Locale("fa"), // Persian
            Locale("sk"), // Slovak
            Locale("ro"), // Romanian
            Locale("no"), // Norwegian
            Locale("ms"), // Malay
            Locale("lt"), // Lithuanian
        ),
    )

    // Maps a locale to the site's /lang/<slug> path segment. Returns null
    // for "all languages" — note there is no branch for English, so English
    // also falls through to null. Chinese variants are checked most-specific
    // first (TW, HK) before generic "zh".
    private fun getLanguageSlug(locale: Locale?): String? {
        return when {
            locale?.language == "fr" -> "french"
            locale?.language == "ja" -> "jp"
            locale?.language == "zh" && locale.country == "TW" -> "traditional-chinese"
            locale?.language == "zh" && locale.country == "HK" -> "cantonese"
            locale?.language == "zh" -> "chinese"
            locale?.language == "de" -> "german"
            locale?.language == "it" -> "italian"
            locale?.language == "ko" -> "korean"
            locale?.language == "es" -> "spanish"
            locale?.language == "pt" -> "portuguese"
            locale?.language == "ru" -> "russian"
            locale?.language == "tr" -> "turkish"
            locale?.language == "vi" -> "vietnamese"
            locale?.language == "ar" -> "arabic"
            locale?.language == "id" -> "bahasa"
            locale?.language == "th" -> "thai"
            locale?.language == "pl" -> "polish"
            locale?.language == "sv" -> "swedish"
            locale?.language == "nl" -> "flemish-dutch"
            locale?.language == "hu" -> "hungarian"
            locale?.language == "hi" -> "hindi"
            locale?.language == "he" -> "hebrew"
            locale?.language == "el" -> "greek"
            locale?.language == "fi" -> "finnish"
            locale?.language == "fil" -> "filipino"
            locale?.language == "da" -> "danish"
            locale?.language == "cs" -> "czech"
            locale?.language == "hr" -> "croatian"
            locale?.language == "bg" -> "bulgarian"
            locale?.language == "fa" -> "persian"
            locale?.language == "sk" -> "slovak"
            locale?.language == "ro" -> "romanian"
            locale?.language == "no" -> "norwegian-bokmal"
            locale?.language == "ms" -> "malay"
            locale?.language == "lt" -> "lithuanian"
            else -> null //all
        }
    }

    // Builds the listing URL. The language slug is a path prefix; genre and
    // status filters use their own path segments that the site does not
    // combine with /lang/, so when a language is selected those filters are
    // intentionally dropped in favour of plain language browsing.
    override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
        val url = buildString {
            append("https://")
            append(domain)
            // Add language path if specified
            val langSlug = getLanguageSlug(filter.locale)
            if (langSlug != null) {
                append("/lang/")
                append(langSlug)
            }
            when {
                !filter.query.isNullOrEmpty() -> {
                    // Search with language: /lang/french/page/2/?s=example
                    if (page > 1) {
                        append("/page/")
                        append(page)
                    }
                    append("/?s=")
                    append(filter.query.urlEncoded())
                }
                filter.tags.isNotEmpty() -> {
                    // Genre filtering doesn't work with language, so we ignore language for genre
                    if (langSlug == null) {
                        append("/genre/")
                        append(filter.tags.first().key)
                        append("/page/")
                        append(page)
                        append("/")
                    } else {
                        // If both language and genre are selected, just use language
                        append("/page/")
                        append(page)
                        append("/")
                    }
                }
                filter.states.isNotEmpty() -> {
                    // Status filtering doesn't work with language either
                    if (langSlug == null) {
                        append("/status/")
                        append(
                            when (filter.states.first()) {
                                MangaState.ONGOING -> "ongoing"
                                MangaState.FINISHED -> "completed"
                                else -> "ongoing"
                            },
                        )
                        append("/page/")
                        append(page)
                        append("/")
                    } else {
                        // If both language and status are selected, just use language
                        append("/page/")
                        append(page)
                        append("/")
                    }
                }
                else -> {
                    // Regular browsing with or without language
                    append("/page/")
                    append(page)
                    append("/")
                }
            }
        }
        val doc = webClient.httpGet(url).parseHtml()
        return parseMangaList(doc)
    }

    // Parses the archive grid; video posts are excluded via the
    // :not(.category-video) selector. Titles are stripped of [bracketed]
    // scanlator/author prefixes and a trailing "(...)" suffix.
    private fun parseMangaList(doc: Document): List<Manga> {
        return doc.select("div.content-archive article.post:not(.category-video)").mapNotNull { element ->
            val titleElement = element.selectFirst("h2.entry-title a") ?: return@mapNotNull null
            val thumbnailElement = element.selectFirst("a.entry-image-link img")
            Manga(
                id = generateUid(titleElement.attr("href")),
                title = titleElement.text().replace(titleRegex.toRegex(), "").substringBeforeLast("(").trim(),
                altTitles = emptySet(),
                url = titleElement.attrAsRelativeUrl("href"),
                publicUrl = titleElement.absUrl("href"),
                rating = RATING_UNKNOWN,
                contentRating = ContentRating.ADULT,
                coverUrl = findImageSrc(thumbnailElement),
                tags = emptySet(),
                state = null,
                authors = emptySet(),
                source = source,
            )
        }
    }

    // Details are scraped from loosely structured <p> runs: alt titles
    // follow p.alt-title-class, the description follows p.info-class, and
    // both walks stop at the next structural marker class. The author is
    // taken from a "(...)"-style tag link, falling back to the [bracketed]
    // prefix of the title.
    override suspend fun getDetails(manga: Manga): Manga {
        val doc = webClient.httpGet(manga.url.toAbsoluteUrl(domain)).parseHtml()
        val title = doc.selectFirst("h1.entry-title")?.text() ?: manga.title
        val altTitles = mutableSetOf<String>()
        val altTitleElement = doc.selectFirst("p.alt-title-class")
        if (altTitleElement != null) {
            var nextElement = altTitleElement.nextElementSibling()
            while (nextElement != null && nextElement.tagName() == "p" &&
                !nextElement.hasClass("info-class") && !nextElement.hasClass("chapter-class")
            ) {
                val altTitle = nextElement.text().trim()
                if (altTitle.isNotEmpty()) {
                    altTitles.add(altTitle)
                }
                nextElement = nextElement.nextElementSibling()
            }
        }
        var description = ""
        val descriptionElement = doc.selectFirst("p.info-class")
        if (descriptionElement != null) {
            var nextElement = descriptionElement.nextElementSibling()
            val descParts = mutableListOf<String>()
            while (nextElement != null && nextElement.tagName() == "p" &&
                !nextElement.hasClass("chapter-class") && !nextElement.hasClass("alt-title-class")
            ) {
                val text = nextElement.text()
                if (text.isNotEmpty()) {
                    descParts.add(text)
                }
                nextElement = nextElement.nextElementSibling()
            }
            description = descParts.joinToString("\n\n")
        }
        // Fallback: bold text in the entry content, then the title itself.
        if (description.isEmpty()) {
            description = doc.select("div.entry-content p strong")
                .joinToString("\n") { it.text() }
                .trim()
                .ifEmpty { title }
        }
        val authorFromTitle = title.substringAfter("[").substringBefore("]").trim()
        val authorFromTag = doc.select("span.entry-tags a[href*='/tag/']")
            .firstOrNull { it.text().contains("(") && it.text().contains(")") }
            ?.text()?.trim()
        val author = authorFromTag ?: authorFromTitle
        val genres = mutableSetOf<MangaTag>()
        doc.select("span.entry-terms:has(span:contains(Genres)) a").forEach {
            genres.add(
                MangaTag(
                    title = it.text(),
                    key = it.attr("href").substringAfterLast("/genre/").substringBefore("/"),
                    source = source,
                ),
            )
        }
        val state = when (doc.select("a[href*=status]").firstOrNull()?.text()) {
            "Ongoing" -> MangaState.ONGOING
            "Completed" -> MangaState.FINISHED
            else -> null
        }
        val chapters = parseChapters(doc)
        return manga.copy(
            altTitles = altTitles,
            description = description,
            tags = genres,
            state = state,
            // Author equal to the whole title means the bracket fallback
            // found nothing useful, so it is discarded.
            authors = setOfNotNull(author.takeIf { it.isNotEmpty() && it != title }),
            chapters = chapters,
        )
    }

    // Collects content images, filtering out known ad/banner patterns by
    // URL substring, then de-duplicates while preserving order.
    override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
        val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()
        val images = doc.select("div.entry-content img.img-myreadingmanga, div.entry-content div > img")
            .filter { element ->
                val src = findImageSrc(element)
                src != null && !src.contains("GH-") && !src.contains("nucarnival") &&
                    !src.contains("/wp-content/uploads/202") // Exclude old uploads that might be ads
            }
            .mapNotNull { findImageSrc(it) }
            .distinct()
        return images.mapIndexed { index, url ->
            MangaPage(
                id = generateUid(url),
                url = url,
                preview = null,
                source = source,
            )
        }
    }

    // Scrapes the genre tag cloud from the home-page sidebar widget; tag
    // labels carry a " (count)" suffix that is stripped from the title.
    private suspend fun fetchTags(): Set<MangaTag> {
        val doc = webClient.httpGet("https://$domain/").parseHtml()
        return doc.select("h4.widget-title.widgettitle:contains(Genres) + .tagcloud a")
            .mapToSet { element ->
                MangaTag(
                    title = element.text().substringBefore(" ("),
                    key = element.attr("href").trimEnd('/').substringAfterLast('/'),
                    source = source,
                )
            }
    }

    // Matches a "[...]" bracketed group (scanlator/author prefix in titles).
    private val titleRegex = Pattern.compile("""\[[^]]*]""")
    // Matches common raster-image file extensions in a URL.
    private val imgRegex = Pattern.compile("""\.(jpg|png|jpeg|webp)""")

    // Resolves the best image URL from lazy-loading attribute variants, in
    // priority order: data-src, data-cfsrc, src (each must look like an
    // image URL), then data-lazy-src unconditionally.
    private fun findImageSrc(element: Element?): String? {
        element ?: return null
        return when {
            element.hasAttr("data-src") && imgRegex.matcher(element.attr("data-src")).find() ->
                element.absUrl("data-src")
            element.hasAttr("data-cfsrc") && imgRegex.matcher(element.attr("data-cfsrc")).find() ->
                element.absUrl("data-cfsrc")
            element.hasAttr("src") && imgRegex.matcher(element.attr("src")).find() ->
                element.absUrl("src")
            element.hasAttr("data-lazy-src") ->
                element.absUrl("data-lazy-src")
            else -> null
        }
    }

    // Builds the chapter list from the detail page. With pagination links
    // present, each numbered sub-page becomes one chapter (page 1 is the
    // base URL itself); otherwise the whole post is a single chapter. A
    // chapter title found after div.chapter-class is reused for every page,
    // with its "1" substituted by the page number — a crude heuristic.
    private fun parseChapters(document: Document): List<MangaChapter> {
        val chapters = mutableListOf<MangaChapter>()
        val mangaUrl = document.baseUri().removeSuffix("/")
        val date = parseDate(document.select("time.entry-time").text())
        // Look for chapter information
        val chapterClass = document.selectFirst("div.chapter-class")
        // Check if there's a chapter title after the chapter-class div
        var chapterTitle: String? = null
        if (chapterClass != null) {
            var nextElement = chapterClass.nextElementSibling()
            while (nextElement != null && nextElement.tagName() != "div") {
                if (nextElement.tagName() == "p" && nextElement.text().contains("Chapter", ignoreCase = true)) {
                    chapterTitle = nextElement.text().trim()
                    break
                }
                nextElement = nextElement.nextElementSibling()
            }
        }
        // Check for pagination
        val paginationInContent =
            document.select("div.entry-pagination a.page-numbers, div.chapter-class .entry-pagination a.page-numbers")
                .mapNotNull { it.text().toIntOrNull() }
                .maxOrNull()
        if (paginationInContent != null && paginationInContent > 1) {
            // Multi-page manga with chapters
            for (i in 1..paginationInContent) {
                val title = when {
                    chapterTitle != null && i == 1 -> chapterTitle
                    chapterTitle != null -> chapterTitle.replace("1", i.toString())
                    else -> "Chapter $i"
                }
                chapters.add(
                    MangaChapter(
                        id = generateUid("$mangaUrl/$i"),
                        title = title,
                        number = i.toFloat(),
                        url = if (i == 1) mangaUrl else "$mangaUrl/$i/",
                        uploadDate = date,
                        source = source,
                        scanlator = null,
                        branch = null,
                        volume = 0,
                    ),
                )
            }
        } else {
            // Single page manga or no pagination found
            chapters.add(
                MangaChapter(
                    id = generateUid(mangaUrl),
                    title = chapterTitle ?: "Complete",
                    number = 1f,
                    url = mangaUrl,
                    uploadDate = date,
                    source = source,
                    scanlator = null,
                    branch = null,
                    volume = 0,
                ),
            )
        }
        return chapters
    }

    // Parses dates like "January 01, 2024" (site format, US locale) into
    // epoch millis; returns 0 on failure.
    private fun parseDate(date: String): Long {
        return try {
            SimpleDateFormat("MMMM dd, yyyy", Locale.US).parse(date)?.time ?: 0
        } catch (_: Exception) {
            0L
        }
    }
}

@ -8,8 +8,8 @@ import okhttp3.Interceptor
import okhttp3.Response
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import java.text.SimpleDateFormat
@ -17,17 +17,22 @@ import java.util.*
internal abstract class NineMangaParser(
context: MangaLoaderContext,
source: MangaSource,
source: MangaParserSource,
defaultDomain: String,
) : PagedMangaParser(context, source, pageSize = 26), Interceptor {
override val configKeyDomain = ConfigKey.Domain(defaultDomain)
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
init {
context.cookieJar.insertCookies(domain, "ninemanga_template_desk=yes")
}
override val headers = super.headers.newBuilder()
override fun getRequestHeaders() = super.getRequestHeaders().newBuilder()
.add("Accept-Language", "en-US;q=0.7,en;q=0.3")
.build()
@ -35,12 +40,21 @@ internal abstract class NineMangaParser(
SortOrder.POPULARITY,
)
override val availableStates: Set<MangaState> = EnumSet.of(
MangaState.ONGOING,
MangaState.FINISHED,
)
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isMultipleTagsSupported = true,
isTagsExclusionSupported = true,
isSearchWithFiltersSupported = true,
isSearchSupported = true,
)
override val isTagsExclusionSupported: Boolean = true
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = getOrCreateTagMap().values.toSet(),
availableStates = EnumSet.of(
MangaState.ONGOING,
MangaState.FINISHED,
),
)
override fun intercept(chain: Interceptor.Chain): Response {
val request = chain.request()
@ -52,56 +66,46 @@ internal abstract class NineMangaParser(
return chain.proceed(newRequest)
}
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val url = buildString {
append("https://")
append(domain)
when (filter) {
is MangaListFilter.Search -> {
append("/search/?name_sel=&wd=")
if (filter.tags.isNotEmpty() || filter.tagsExclude.isNotEmpty() || filter.states.isNotEmpty() || !filter.query.isNullOrEmpty()) {
append("/search/")
append("?page=")
append(page.toString())
filter.query?.let {
append("&name_sel=contain&wd=")
append(filter.query.urlEncoded())
append("&page=")
append(page)
append(".html")
}
is MangaListFilter.Advanced -> {
if (filter.tags.isNotEmpty() || filter.tagsExclude.isNotEmpty() || filter.states.isNotEmpty()) {
append("/search/?category_id=")
append(filter.tags.joinToString(separator = ",") { it.key })
append("&out_category_id=")
append(filter.tagsExclude.joinToString(separator = ",") { it.key })
filter.states.oneOrThrowIfMany()?.let {
append("&completed_series=")
when (it) {
MangaState.ONGOING -> append("no")
MangaState.FINISHED -> append("yes")
else -> append("either")
}
}
append("&page=")
} else {
append("/category/index_")
append("&category_id=")
append(filter.tags.joinToString(separator = ",") { it.key })
append("&out_category_id=")
append(filter.tagsExclude.joinToString(separator = ",") { it.key })
filter.states.oneOrThrowIfMany()?.let {
append("&completed_series=")
when (it) {
MangaState.ONGOING -> append("no")
MangaState.FINISHED -> append("yes")
else -> append("either")
}
append(page.toString())
append(".html")
}
null -> {
append("/category/index_")
append(page)
append(".html")
}
} else {
append("/category/index_")
append(page.toString())
}
}
val doc = webClient.httpGet(url).parseHtml()
val root = doc.body().selectFirst("ul.direlist") ?: doc.parseFailed("Cannot find root")
val root = doc.body().selectFirstOrThrow("ul.direlist")
val baseHost = root.baseUri().toHttpUrl().host
return root.select("li").map { node ->
val href = node.selectFirst("a")?.absUrl("href") ?: node.parseFailed("Link not found")
val href = node.selectFirstOrThrow("a").attrAsAbsoluteUrl("href")
val relUrl = href.toRelativeUrl(baseHost)
val dd = node.selectFirst("dd")
Manga(
@ -109,11 +113,11 @@ internal abstract class NineMangaParser(
url = relUrl,
publicUrl = href,
title = dd?.selectFirst("a.bookname")?.text()?.toCamelCase().orEmpty(),
altTitle = null,
coverUrl = node.selectFirst("img")?.absUrl("src").orEmpty(),
altTitles = emptySet(),
coverUrl = node.selectFirst("img")?.src(),
rating = RATING_UNKNOWN,
author = null,
isNsfw = false,
authors = emptySet(),
contentRating = null,
tags = emptySet(),
state = null,
source = source,
@ -131,21 +135,24 @@ internal abstract class NineMangaParser(
val tagMap = getOrCreateTagMap()
val selectTag = infoRoot.getElementsByAttributeValue("itemprop", "genre").first()?.select("a")
val tags = selectTag?.mapNotNullToSet { tagMap[it.text()] }
val author = infoRoot.getElementsByAttributeValue("itemprop", "author").first()?.textOrNull()
return manga.copy(
title = root.selectFirst("h1[itemprop=name]")?.textOrNull()?.removeSuffix("Manga")?.trimEnd()
?: manga.title,
tags = tags.orEmpty(),
author = infoRoot.getElementsByAttributeValue("itemprop", "author").first()?.text(),
authors = setOfNotNull(author),
state = parseStatus(infoRoot.select("li a.red").text()),
description = infoRoot.getElementsByAttributeValue("itemprop", "description").first()?.html()
?.substringAfter("</b>"),
chapters = root.selectFirst("div.chapterbox")?.select("ul.sub_vol_ul > li")
?.mapChapters(reversed = true) { i, li ->
val a = li.selectFirst("a.chapter_list_a")
val href =
a?.attrAsRelativeUrlOrNull("href")?.replace("%20", " ") ?: li.parseFailed("Link not found")
val a = li.selectFirstOrThrow("a.chapter_list_a")
val href = a.attrAsRelativeUrl("href").replace("%20", " ")
MangaChapter(
id = generateUid(href),
name = a.text(),
number = i + 1,
title = a.textOrNull(),
number = i + 1f,
volume = 0,
url = href,
uploadDate = parseChapterDateByLang(li.selectFirst("span")?.text().orEmpty()),
source = source,
@ -158,7 +165,7 @@ internal abstract class NineMangaParser(
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val doc = webClient.httpGet(chapter.url.toAbsoluteUrl(domain)).parseHtml()
return doc.body().getElementById("page")?.select("option")?.map { option ->
return doc.body().requireElementById("page").select("option").map { option ->
val url = option.attr("value")
MangaPage(
id = generateUid(url),
@ -166,22 +173,18 @@ internal abstract class NineMangaParser(
preview = null,
source = source,
)
} ?: doc.parseFailed("Pages list not found")
}
}
override suspend fun getPageUrl(page: MangaPage): String {
val doc = webClient.httpGet(page.url.toAbsoluteUrl(domain)).parseHtml()
val root = doc.body()
return root.selectFirst("a.pic_download")?.absUrl("href") ?: doc.parseFailed("Page image not found")
return root.selectFirstOrThrow("a.pic_download").attrAsAbsoluteUrl("href")
}
private var tagCache: ArrayMap<String, MangaTag>? = null
private val mutex = Mutex()
override suspend fun getAvailableTags(): Set<MangaTag> {
return getOrCreateTagMap().values.toSet()
}
private suspend fun getOrCreateTagMap(): Map<String, MangaTag> = mutex.withLock {
tagCache?.let { return@withLock it }
val tagMap = ArrayMap<String, MangaTag>()
@ -230,7 +233,7 @@ internal abstract class NineMangaParser(
if (dateWords.size == 3) {
if (dateWords[1].contains(",")) {
SimpleDateFormat("MMM d, yyyy", Locale.ENGLISH).tryParse(date)
SimpleDateFormat("MMM d, yyyy", Locale.ENGLISH).parseSafe(date)
} else {
val timeAgo = Integer.parseInt(dateWords[0])
return Calendar.getInstance().apply {
@ -266,49 +269,49 @@ internal abstract class NineMangaParser(
@MangaSourceParser("NINEMANGA_EN", "NineManga English", "en")
class English(context: MangaLoaderContext) : NineMangaParser(
context,
MangaSource.NINEMANGA_EN,
MangaParserSource.NINEMANGA_EN,
"www.ninemanga.com",
)
@MangaSourceParser("NINEMANGA_ES", "NineManga Español", "es")
class Spanish(context: MangaLoaderContext) : NineMangaParser(
context,
MangaSource.NINEMANGA_ES,
MangaParserSource.NINEMANGA_ES,
"es.ninemanga.com",
)
@MangaSourceParser("NINEMANGA_RU", "NineManga Русский", "ru")
class Russian(context: MangaLoaderContext) : NineMangaParser(
context,
MangaSource.NINEMANGA_RU,
MangaParserSource.NINEMANGA_RU,
"ru.ninemanga.com",
)
@MangaSourceParser("NINEMANGA_DE", "NineManga Deutsch", "de")
class Deutsch(context: MangaLoaderContext) : NineMangaParser(
context,
MangaSource.NINEMANGA_DE,
MangaParserSource.NINEMANGA_DE,
"de.ninemanga.com",
)
@MangaSourceParser("NINEMANGA_BR", "NineManga Brasil", "pt")
class Brazil(context: MangaLoaderContext) : NineMangaParser(
context,
MangaSource.NINEMANGA_BR,
MangaParserSource.NINEMANGA_BR,
"br.ninemanga.com",
)
@MangaSourceParser("NINEMANGA_IT", "NineManga Italiano", "it")
class Italiano(context: MangaLoaderContext) : NineMangaParser(
context,
MangaSource.NINEMANGA_IT,
MangaParserSource.NINEMANGA_IT,
"it.ninemanga.com",
)
@MangaSourceParser("NINEMANGA_FR", "NineManga Français", "fr")
class Francais(context: MangaLoaderContext) : NineMangaParser(
context,
MangaSource.NINEMANGA_FR,
MangaParserSource.NINEMANGA_FR,
"fr.ninemanga.com",
)
}

@ -5,37 +5,51 @@ import okhttp3.Interceptor
import okhttp3.Response
import org.json.JSONArray
import org.json.JSONObject
import org.koitharu.kotatsu.parsers.Broken
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import org.koitharu.kotatsu.parsers.util.json.asTypedList
import org.koitharu.kotatsu.parsers.util.json.getStringOrNull
import org.koitharu.kotatsu.parsers.util.json.mapJSON
import org.koitharu.kotatsu.parsers.util.json.mapJSONToSet
import org.koitharu.kotatsu.parsers.util.json.toJSONList
import org.koitharu.kotatsu.parsers.util.suspendlazy.suspendLazy
import java.text.SimpleDateFormat
import java.util.*
@MangaSourceParser("NINENINENINEHENTAI", "999Hentai", type = ContentType.HENTAI)
@Broken
@MangaSourceParser("NINENINENINEHENTAI", "AnimeH", type = ContentType.HENTAI)
internal class NineNineNineHentaiParser(context: MangaLoaderContext) :
PagedMangaParser(context, MangaSource.NINENINENINEHENTAI, size), Interceptor {
PagedMangaParser(context, MangaParserSource.NINENINENINEHENTAI, PAGE_SIZE), Interceptor {
override val configKeyDomain = ConfigKey.Domain("999hentai.net")
override val configKeyDomain = ConfigKey.Domain("animeh.to")
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override val availableSortOrders: EnumSet<SortOrder> = EnumSet.of(
SortOrder.POPULARITY,
SortOrder.NEWEST,
)
override val isMultipleTagsSupported = false
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isSearchSupported = true,
)
override suspend fun getAvailableLocales() = setOf(
Locale.ENGLISH,
Locale.CHINESE,
Locale.JAPANESE,
Locale("es"),
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
availableLocales = setOf(
Locale.ENGLISH,
Locale.CHINESE,
Locale.JAPANESE,
Locale("es"),
),
)
private fun Locale?.getSiteLang(): String {
@ -61,7 +75,7 @@ internal class NineNineNineHentaiParser(context: MangaLoaderContext) :
return chain.proceed(newRequest)
}
private val cdnHost = SuspendLazy(::getUpdatedCdnHost)
private val cdnHost = suspendLazy(initializer = ::getUpdatedCdnHost)
private suspend fun getUpdatedCdnHost(): String {
val url = "https://$domain/manga-home"
@ -70,7 +84,7 @@ internal class NineNineNineHentaiParser(context: MangaLoaderContext) :
return cdn?.toHttpUrlOrNull()?.host ?: "edge.fast4speed.rsvp"
}
override suspend fun getAvailableTags(): Set<MangaTag> {
private suspend fun fetchAvailableTags(): Set<MangaTag> {
val query = """
queryTags(
search: {format:"tagchapter",sortBy:Popular}
@ -97,23 +111,15 @@ internal class NineNineNineHentaiParser(context: MangaLoaderContext) :
}
}
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
return when (filter) {
is MangaListFilter.Advanced -> {
if (filter.tags.isEmpty() && filter.sortOrder == SortOrder.POPULARITY) {
getPopularList(page, filter.locale)
} else {
getSearchList(page, null, filter.locale, filter.tags, filter.sortOrder)
}
}
is MangaListFilter.Search -> {
getSearchList(page, filter.query, null, null, filter.sortOrder)
}
else -> {
getPopularList(page, null)
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
return if (filter.query.isNullOrEmpty()) {
if (filter.tags.isEmpty() && order == SortOrder.POPULARITY) {
getPopularList(page, filter.locale)
} else {
getSearchList(page, null, filter.locale, filter.tags, order)
}
} else {
getSearchList(page, filter.query, null, null, order)
}
}
@ -123,7 +129,7 @@ internal class NineNineNineHentaiParser(context: MangaLoaderContext) :
): List<Manga> {
val query = """
queryPopularChapters(
size: $size
size: $PAGE_SIZE
language: "${locale.getSiteLang()}"
dateRange: 1
page: $page
@ -164,7 +170,7 @@ internal class NineNineNineHentaiParser(context: MangaLoaderContext) :
}
val query = """
queryChapters(
limit: $size
limit: $PAGE_SIZE
search: {$searchPayload}
page: $page
) {
@ -194,14 +200,14 @@ internal class NineNineNineHentaiParser(context: MangaLoaderContext) :
Manga(
id = generateUid(id),
title = name.replace(shortenTitleRegex, "").trim(),
altTitle = name,
altTitles = setOf(name),
coverUrl = when {
cover?.startsWith("http") == true -> cover
cover == null -> ""
cover == null -> null
else -> "https://${cdnHost.get()}/$cover"
},
author = null,
isNsfw = true,
authors = emptySet(),
contentRating = ContentRating.ADULT,
url = id,
publicUrl = "/hchapter/$id".toAbsoluteUrl(domain),
tags = emptySet(),
@ -258,13 +264,14 @@ internal class NineNineNineHentaiParser(context: MangaLoaderContext) :
type = it.getStringOrNull("tagType"),
)
}
val author = tags?.filter { it.type == "artist" }?.joinToString { it.name.toCamelCase() }?.nullIfEmpty()
return manga.copy(
title = name.replace(shortenTitleRegex, "").trim(),
altTitle = name,
altTitles = setOf(name),
coverUrl = cover.first,
largeCoverUrl = cover.second,
author = tags?.filter { it.type == "artist" }?.joinToString { it.name.toCamelCase() },
isNsfw = true,
authors = setOfNotNull(author),
contentRating = ContentRating.ADULT,
tags = tags?.mapToSet {
MangaTag(
title = it.name.toCamelCase(),
@ -277,8 +284,9 @@ internal class NineNineNineHentaiParser(context: MangaLoaderContext) :
chapters = listOf(
MangaChapter(
id = generateUid(id),
name = name,
number = 1,
title = name,
number = 1f,
volume = 0,
url = id,
uploadDate = runCatching {
dateFormat.parse(entry.getString("uploadDate"))!!.time
@ -318,7 +326,7 @@ internal class NineNineNineHentaiParser(context: MangaLoaderContext) :
_id: "${seed.url}"
search: {sortBy:Popular}
page: 1
size: $size
size: $PAGE_SIZE
) {
chapters {
_id
@ -360,8 +368,8 @@ internal class NineNineNineHentaiParser(context: MangaLoaderContext) :
}
}
val pics = pages.getJSONArray("pics").toJSONList()
val picsS = pages.getJSONArray("picsS").toJSONList()
val pics = pages.getJSONArray("pics").asTypedList<JSONObject>()
val picsS = pages.getJSONArray("picsS").asTypedList<JSONObject>()
return pics.zip(picsS).map {
val img = it.first.getString("url")
@ -380,7 +388,7 @@ internal class NineNineNineHentaiParser(context: MangaLoaderContext) :
}
companion object {
private const val size = 20
private const val PAGE_SIZE = 20
private val shortenTitleRegex = Regex("""(\[[^]]*]|[({][^)}]*[)}])""")
private val dateFormat = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.ENGLISH)
}

@ -1,85 +1,56 @@
package org.koitharu.kotatsu.parsers.site.all
import androidx.collection.arraySetOf
import kotlinx.coroutines.async
import kotlinx.coroutines.awaitAll
import kotlinx.coroutines.coroutineScope
import okhttp3.Headers
import okhttp3.HttpUrl
import okhttp3.HttpUrl.Companion.toHttpUrl
import org.json.JSONArray
import org.json.JSONObject
import org.jsoup.nodes.Element
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaParser
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.exception.NotFoundException
import org.koitharu.kotatsu.parsers.core.AbstractMangaParser
import org.koitharu.kotatsu.parsers.exception.ParseException
import org.koitharu.kotatsu.parsers.model.ContentType
import org.koitharu.kotatsu.parsers.model.Manga
import org.koitharu.kotatsu.parsers.model.MangaChapter
import org.koitharu.kotatsu.parsers.model.MangaListFilter
import org.koitharu.kotatsu.parsers.model.MangaPage
import org.koitharu.kotatsu.parsers.model.MangaSource
import org.koitharu.kotatsu.parsers.model.MangaTag
import org.koitharu.kotatsu.parsers.model.RATING_UNKNOWN
import org.koitharu.kotatsu.parsers.model.SortOrder
import org.koitharu.kotatsu.parsers.util.SoftSuspendLazy
import org.koitharu.kotatsu.parsers.util.SuspendLazy
import org.koitharu.kotatsu.parsers.util.domain
import org.koitharu.kotatsu.parsers.util.generateUid
import org.koitharu.kotatsu.parsers.util.json.getBooleanOrDefault
import org.koitharu.kotatsu.parsers.util.json.getFloatOrDefault
import org.koitharu.kotatsu.parsers.util.json.getIntOrDefault
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import org.koitharu.kotatsu.parsers.util.json.getStringOrNull
import org.koitharu.kotatsu.parsers.util.json.mapJSON
import org.koitharu.kotatsu.parsers.util.json.mapJSONIndexed
import org.koitharu.kotatsu.parsers.util.mapChapters
import org.koitharu.kotatsu.parsers.util.oneOrThrowIfMany
import org.koitharu.kotatsu.parsers.util.parseJson
import org.koitharu.kotatsu.parsers.util.splitTwoParts
import org.koitharu.kotatsu.parsers.util.toAbsoluteUrl
import org.koitharu.kotatsu.parsers.util.urlEncoded
import java.util.Calendar
import java.util.EnumSet
import javax.crypto.Mac
import javax.crypto.spec.SecretKeySpec
internal abstract class WebtoonsParser(
context: MangaLoaderContext,
source: MangaSource,
) : MangaParser(context, source) {
source: MangaParserSource,
) : AbstractMangaParser(context, source) {
override val isMultipleTagsSupported = false
private val signer by lazy {
WebtoonsUrlSigner("gUtPzJFZch4ZyAGviiyH94P99lQ3pFdRTwpJWDlSGFfwgpr6ses5ALOxWHOIT7R1")
}
// we don't __really__ support changing this domain because:
// 1. I don't think other websites have this exact API
// 2. most communication is done with other domains (hosting API and static content), which are not configurable
// 3. we rely on the HTTP client setting the referer header to webtoons.com
//
// This effectively means that changing the domain will break the source. Yikes
override val configKeyDomain = ConfigKey.Domain("webtoons.com")
private val apiDomain = "global.apis.naver.com"
private val mobileApiDomain = "m.webtoons.com"
private val staticDomain = "webtoon-phinf.pstatic.net"
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.POPULARITY, // views
SortOrder.RATING, // star rating
//SortOrder.LIKE, // likes
override val availableSortOrders: EnumSet<SortOrder> = EnumSet.of(
SortOrder.POPULARITY,
SortOrder.RATING,
SortOrder.UPDATED,
)
override val headers: Headers
get() = Headers.Builder().add("User-Agent", "nApps (Android 12;; linewebtoon; 3.1.0)").build()
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isSearchSupported = true,
)
override val userAgentKey =
ConfigKey.UserAgent("Mozilla/5.0 (Linux; Android 12; SM-G991B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.120 Mobile Safari/537.36")
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = availableTags(),
)
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override suspend fun getPageUrl(page: MangaPage): String {
return page.url.toAbsoluteUrl(staticDomain)
}
// some language tags do not map perfectly to the ones used by the API
private val languageCode: String
get() = when (val tag = sourceLocale.toLanguageTag()) {
"in" -> "id"
@ -87,272 +58,242 @@ internal abstract class WebtoonsParser(
else -> tag
}
private suspend fun fetchEpisodes(titleNo: Long): List<MangaChapter> = coroutineScope {
val firstResult =
makeRequest("/lineWebtoon/webtoon/episodeList.json?v=5&titleNo=$titleNo&startIndex=0&pageSize=30")
val totalEpisodeCount = firstResult.getJSONObject("episodeList").getInt("totalServiceEpisodeCount")
val episodes = firstResult.getJSONObject("episodeList").getJSONArray("episode").toJSONList().toMutableList()
private suspend fun fetchEpisodes(titleNo: Long): List<MangaChapter> {
val url = "https://$mobileApiDomain/api/v1/webtoon/$titleNo/episodes?pageSize=99999"
val json = webClient.httpGet(url).parseJson()
val additionalEpisodes = (episodes.size until totalEpisodeCount step 30).map { startIndex ->
async {
makeRequest("/lineWebtoon/webtoon/episodeList.json?v=5&titleNo=$titleNo&startIndex=$startIndex&pageSize=30").getJSONObject(
"episodeList",
).getJSONArray("episode").toJSONList()
}
}.awaitAll().flatten()
val episodeList = json.optJSONObject("result")?.optJSONArray("episodeList")
?: throw ParseException("No episodes found for title $titleNo", url)
episodes.addAll(additionalEpisodes)
return episodeList.mapChapters { _, jo ->
val episodeTitle = jo.getStringOrNull("episodeTitle") ?: ""
val episodeNo = jo.getInt("episodeNo")
val viewerLink = jo.getString("viewerLink")
// Optimize object creation and sorting
episodes.mapChapters { i, jo ->
MangaChapter(
id = generateUid("$titleNo-$i"),
name = jo.getString("episodeTitle"),
number = jo.getInt("episodeSeq"),
url = "$titleNo-${jo.get("episodeNo")}",
uploadDate = jo.getLong("registerYmdt"),
id = generateUid("$titleNo-$episodeNo"),
title = episodeTitle,
number = episodeNo.toFloat(),
volume = 0,
url = viewerLink,
uploadDate = jo.getLong("exposureDateMillis"),
branch = null,
scanlator = null,
source = source,
)
}.sortedBy(MangaChapter::number)
}
private fun JSONArray.toJSONList(): List<JSONObject> {
val list = mutableListOf<JSONObject>()
for (i in 0 until length()) {
list.add(getJSONObject(i))
}
return list
}
override suspend fun getDetails(manga: Manga): Manga = coroutineScope {
val titleNo = manga.url.toLong()
val chaptersDeferred = async { fetchEpisodes(titleNo) }
val chapters = chaptersDeferred.await()
makeRequest("/lineWebtoon/webtoon/titleInfo.json?titleNo=${titleNo}&anyServiceStatus=false").getJSONObject("titleInfo")
.let { jo ->
MangaWebtoon(
Manga(
id = generateUid(titleNo),
title = jo.getString("title"),
altTitle = null,
url = "$titleNo",
publicUrl = "https://$domain/$languageCode/originals/a/list?title_no=${titleNo}",
rating = jo.getFloatOrDefault("starScoreAverage", -10f) / 10f,
isNsfw = jo.getBooleanOrDefault("ageGradeNotice", isNsfwSource),
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
largeCoverUrl = jo.getStringOrNull("thumbnailVertical")?.toAbsoluteUrl(staticDomain),
tags = setOf(parseTag(jo.getJSONObject("genreInfo"))),
author = jo.getStringOrNull("writingAuthorName"),
description = jo.getString("synopsis"),
// I don't think the API provides this info,
state = null,
chapters = chapters,
source = source,
),
date = jo.getLong("lastEpisodeRegisterYmdt"),
readCount = jo.getLong("readCount"),
//likeCount = jo.getLong("likeitCount"),
).manga
}
}
val detailsUrl = manga.publicUrl.ifBlank {
"https://$domain/$languageCode/drama/placeholder/list?title_no=$titleNo"
}
private val allGenreCache = SuspendLazy {
makeRequest("/lineWebtoon/webtoon/genreList.json").getJSONObject("genreList").getJSONArray("genres")
.mapJSON { jo -> parseTag(jo) }.associateBy { tag -> tag.key }
}
val doc = webClient.httpGet(detailsUrl).parseHtml()
private val allTitleCache = SoftSuspendLazy {
makeRequest("/lineWebtoon/webtoon/titleList.json?").getJSONObject("titleList").getJSONArray("titles")
.mapJSON { jo ->
val titleNo = jo.getLong("titleNo")
MangaWebtoon(
Manga(
id = generateUid(titleNo),
url = titleNo.toString(),
publicUrl = "https://$domain/$languageCode/originals/a/list?title_no=$titleNo",
title = jo.getString("title"),
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
altTitle = null,
author = jo.getStringOrNull("writingAuthorName"),
isNsfw = jo.getBooleanOrDefault("ageGradeNotice", isNsfwSource),
rating = jo.getFloatOrDefault("starScoreAverage", -10f) / 10f,
tags = setOfNotNull(allGenreCache.get()[jo.getString("representGenre")]),
description = jo.getString("synopsis"),
state = null,
source = source,
),
date = jo.getLong("lastEpisodeRegisterYmdt"),
readCount = jo.getLong("readCount"),
//likeCount = jo.getLong("likeitCount"),
)
}
val title = doc.select("meta[property='og:title']").attr("content")
.ifEmpty { doc.select("h1.subj, h3.subj").text().ifEmpty { manga.title } }
val description = listOf(
doc.select("meta[property='og:description']").attr("content"),
doc.select("#_asideDetail p.summary").text(),
doc.select(".detail_header .summary").text(),
).firstOrNull { it.isNotBlank() }.orEmpty()
val coverUrl = doc.select("meta[property=\"og:image\"]").attr("content").let { url ->
if (url.isNotBlank()) url.toAbsoluteUrl(staticDomain) else manga.coverUrl
}
val author = listOf(
doc.select("meta[property='com-linewebtoon:webtoon:author']").attr("content"),
doc.select(".detail_header .info .author").firstOrNull()?.text(),
doc.select(".author_area").text(),
).firstOrNull { !it.isNullOrBlank() && it != "null" }
val genreElements = doc.select(".detail_header .info .genre").ifEmpty {
doc.select("h2.genre")
}
val genres = genreElements.map { it.text() }.toSet()
val dayInfo = doc.select("#_asideDetail p.day_info").text().ifEmpty {
doc.select(".day_info").text()
}
val state = when {
dayInfo.contains("UP") || dayInfo.contains("EVERY") || dayInfo.contains("NOUVEAU") -> MangaState.ONGOING
dayInfo.contains("END") || dayInfo.contains("COMPLETED") || dayInfo.contains("TERMINÉ") -> MangaState.FINISHED
else -> null
}
val chapters = async { fetchEpisodes(titleNo) }.await()
Manga(
id = generateUid(titleNo),
title = title,
altTitles = emptySet(),
url = "$titleNo",
publicUrl = detailsUrl,
rating = RATING_UNKNOWN,
contentRating = null,
coverUrl = coverUrl,
largeCoverUrl = null,
tags = genres.map { genre -> MangaTag(title = genre, key = genre.lowercase(), source = source) }.toSet(),
authors = setOfNotNull(author.takeIf { it != "null" }),
description = description,
state = state,
chapters = chapters,
source = source,
)
}
private suspend fun getAllGenreList(): Map<String, MangaTag> {
return allGenreCache.get()
private fun getSortOrderParam(order: SortOrder): String {
return when (order) {
SortOrder.POPULARITY -> "MANA"
SortOrder.RATING -> "LIKEIT"
SortOrder.UPDATED -> "UPDATE"
else -> "MANA"
}
}
private suspend fun getAllTitleList(): List<MangaWebtoon> {
return allTitleCache.get()
private fun availableTags() = arraySetOf(
MangaTag("Action", "action", source),
MangaTag("Comedy", "comedy", source),
MangaTag("Drama", "drama", source),
MangaTag("Fantasy", "fantasy", source),
MangaTag("Horror", "horror", source),
MangaTag("Romance", "romance", source),
MangaTag("Sci-Fi", "sf", source),
MangaTag("Slice of Life", "slice_of_life", source),
MangaTag("Sports", "sports", source),
MangaTag("Supernatural", "supernatural", source),
MangaTag("Thriller", "thriller", source),
MangaTag("Historical", "historical", source),
MangaTag("Mystery", "mystery", source),
MangaTag("Superhero", "super_hero", source),
MangaTag("Heartwarming", "heartwarming", source),
MangaTag("Graphic Novel", "graphic_novel", source),
MangaTag("Informative", "tiptoon", source),
)
private val genreUrlMap: Map<String, String> = availableTags().associate {
it.title.lowercase() to it.key
}
override suspend fun getList(offset: Int, filter: MangaListFilter?): List<Manga> {
val webtoons = when (filter) {
is MangaListFilter.Search -> {
makeRequest("/lineWebtoon/webtoon/searchWebtoon?query=${filter.query.urlEncoded()}").getJSONObject("webtoonSearch")
.getJSONArray("titleList").mapJSON { jo ->
val titleNo = jo.getLong("titleNo")
MangaWebtoon(
Manga(
id = generateUid(titleNo),
title = jo.getString("title"),
altTitle = null,
url = titleNo.toString(),
publicUrl = "https://$domain/$languageCode/originals/a/list?title_no=$titleNo",
rating = RATING_UNKNOWN,
isNsfw = isNsfwSource,
coverUrl = jo.getString("thumbnail").toAbsoluteUrl(staticDomain),
largeCoverUrl = null,
tags = emptySet(),
author = jo.getStringOrNull("writingAuthorName"),
description = null,
state = null,
source = source,
))
}
override suspend fun getList(offset: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val document = when {
!filter.query.isNullOrEmpty() -> {
val searchUrl = "https://$domain/$languageCode/search?keyword=${filter.query.urlEncoded()}"
webClient.httpGet(searchUrl).parseHtml()
}
is MangaListFilter.Advanced -> {
val genre = filter.tags.oneOrThrowIfMany()?.key ?: "ALL"
val genres = getAllGenreList()
val result = getAllTitleList()
val sortedResult = when (filter.sortOrder) {
SortOrder.UPDATED -> result.sortedBy { it.date }
SortOrder.POPULARITY -> result.sortedByDescending { it.readCount }
SortOrder.RATING -> result.sortedByDescending { it.manga.rating }
//SortOrder.LIKE -> result.sortedBy { it.likeitCount }
else -> throw IllegalArgumentException("Unsupported sort order: ${filter.sortOrder}")
}
filter.tags.isNotEmpty() -> {
val selectedGenre = filter.tags.first()
val genreUrlPath = genreUrlMap[selectedGenre.key] ?: selectedGenre.key
val sortParam = getSortOrderParam(order)
val genreUrl = "https://$domain/$languageCode/genres/$genreUrlPath?sortOrder=$sortParam"
webClient.httpGet(genreUrl).parseHtml()
}
if (genre != "ALL") {
sortedResult.filter { it.manga.tags.contains(genres[genre]) }
} else {
sortedResult
else -> {
val rankingType = when (order) {
SortOrder.POPULARITY -> "popular"
SortOrder.RATING -> "trending"
SortOrder.UPDATED -> "originals"
else -> "popular"
}
val rankingUrl = "https://$domain/$languageCode/ranking/$rankingType"
webClient.httpGet(rankingUrl).parseHtml()
}
else -> getAllTitleList()
}
return webtoons.map { it.manga }.subList(offset, (offset + 20).coerceAtMost(webtoons.size))
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val (titleNo, episodeNo) = requireNotNull(chapter.url.splitTwoParts('-'))
return makeRequest("/lineWebtoon/webtoon/episodeInfo.json?v=4&titleNo=$titleNo&episodeNo=$episodeNo").getJSONObject(
"episodeInfo",
).getJSONArray("imageInfo").mapJSONIndexed { i, jo ->
MangaPage(
id = generateUid("$titleNo-$episodeNo-$i"),
url = jo.getString("url"),
preview = null,
source = source,
)
}
val selectedGenreForManga = if (filter.tags.isNotEmpty()) filter.tags.first() else null
return document.select(".webtoon_list li a, .card_wrap .card_item a")
.map { element -> createMangaFromElement(element, source, selectedGenreForManga) }
.drop(offset)
.take(20)
}
private fun parseTag(jo: JSONObject): MangaTag {
return MangaTag(
title = jo.getString("name"),
key = jo.getString("code"),
private fun createMangaFromElement(
element: Element,
source: MangaParserSource,
selectedGenre: MangaTag? = null,
): Manga {
val href = element.absUrl("href")
val titleNo = extractTitleNoFromUrl(href)
val title = element.select(".title, .card_title").text()
val thumbnailUrl = element.select("img").attr("src")
return Manga(
id = generateUid(titleNo),
title = title,
altTitles = emptySet(),
url = titleNo.toString(),
publicUrl = href,
rating = RATING_UNKNOWN,
contentRating = null,
coverUrl = thumbnailUrl.toAbsoluteUrl(staticDomain),
largeCoverUrl = null,
tags = selectedGenre?.let { setOf(it) } ?: emptySet(),
authors = emptySet(),
description = null,
state = null,
source = source,
)
}
override suspend fun getAvailableTags(): Set<MangaTag> {
return getAllGenreList().values.toSet()
private fun extractTitleNoFromUrl(url: String): Long {
return Regex("title_no=(\\d+)").find(url)?.groupValues?.get(1)?.toLong()
?: throw ParseException("Could not extract title_no from URL: $url", url)
}
private suspend fun makeRequest(url: String): JSONObject {
val resp = webClient.httpGet(finalizeUrl(url))
val message: JSONObject? = resp.parseJson().optJSONObject("message")
return when (resp.code) {
in 200..299 -> checkNotNull(message).getJSONObject("result")
404 -> throw NotFoundException(message?.getStringOrNull("message").orEmpty(), url)
else -> {
val code = message?.getIntOrDefault("code", 0)
val errorMessage = message?.getStringOrNull("message")
throw ParseException("Api error (code=$code): $errorMessage", url)
}
override suspend fun getPages(chapter: MangaChapter): List<MangaPage> {
val doc = try {
val absUrl = chapter.url.toAbsoluteUrl(domain)
webClient.httpGet(absUrl).parseHtml()
} catch (e: Exception) {
throw ParseException("Failed to get pages for chapter: ${chapter.title}", chapter.url, e)
}
}
private fun finalizeUrl(url: String): HttpUrl {
val httpUrl = url.toAbsoluteUrl(apiDomain).toHttpUrl()
val builder = httpUrl.newBuilder().addQueryParameter("serviceZone", "GLOBAL")
if (httpUrl.queryParameter("v") == null) {
builder.addQueryParameter("v", "1")
fun extractImages(selector: String, attr: String = "data-url"): List<MangaPage> {
return doc.select(selector).mapIndexedNotNull { i, element ->
val url = element.attr(attr).takeIf { it.isNotBlank() }
?: element.attr("src").takeIf { it.contains(staticDomain) }
?: return@mapIndexedNotNull null
MangaPage(
id = generateUid("${chapter.id}-$i"),
url = url,
preview = null,
source = source,
)
}
}
builder.addQueryParameter("language", languageCode).addQueryParameter("locale", "languageCode")
.addQueryParameter("platform", "APP_ANDROID")
signer.makeEncryptUrl(builder)
return builder.build()
return extractImages("div#_imageList > img")
.ifEmpty { extractImages("canvas[data-url]") }
.ifEmpty { extractImages("img[src*='$staticDomain'], img[data-url*='$staticDomain']") }
.ifEmpty { throw ParseException("No images found in chapter.", chapter.url) }
}
@MangaSourceParser("WEBTOONS_EN", "Webtoons English", "en", type = ContentType.MANGA)
class English(context: MangaLoaderContext) : WebtoonsParser(context, MangaSource.WEBTOONS_EN)
class English(context: MangaLoaderContext) : WebtoonsParser(context, MangaParserSource.WEBTOONS_EN)
@MangaSourceParser("WEBTOONS_ID", "Webtoons Indonesia", "id", type = ContentType.MANGA)
class Indonesian(context: MangaLoaderContext) : WebtoonsParser(context, MangaSource.WEBTOONS_ID)
class Indonesian(context: MangaLoaderContext) : WebtoonsParser(context, MangaParserSource.WEBTOONS_ID)
@MangaSourceParser("WEBTOONS_ES", "Webtoons Spanish", "es", type = ContentType.MANGA)
class Spanish(context: MangaLoaderContext) : WebtoonsParser(context, MangaSource.WEBTOONS_ES)
class Spanish(context: MangaLoaderContext) : WebtoonsParser(context, MangaParserSource.WEBTOONS_ES)
@MangaSourceParser("WEBTOONS_FR", "Webtoons French", "fr", type = ContentType.MANGA)
class French(context: MangaLoaderContext) : WebtoonsParser(context, MangaSource.WEBTOONS_FR)
class French(context: MangaLoaderContext) : WebtoonsParser(context, MangaParserSource.WEBTOONS_FR)
@MangaSourceParser("WEBTOONS_TH", "Webtoons Thai", "th", type = ContentType.MANGA)
class Thai(context: MangaLoaderContext) : WebtoonsParser(context, MangaSource.WEBTOONS_TH)
class Thai(context: MangaLoaderContext) : WebtoonsParser(context, MangaParserSource.WEBTOONS_TH)
@MangaSourceParser("WEBTOONS_ZH", "Webtoons Chinese", "zh", type = ContentType.MANGA)
class Chinese(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaSource.WEBTOONS_ZH)
class Chinese(context: MangaLoaderContext) : WebtoonsParser(context, MangaParserSource.WEBTOONS_ZH)
@MangaSourceParser("WEBTOONS_DE", "Webtoons German", "de", type = ContentType.MANGA)
class German(context: MangaLoaderContext) : LineWebtoonsParser(context, MangaSource.WEBTOONS_DE)
private inner class WebtoonsUrlSigner(private val secret: String) {
private val mac = Mac.getInstance("HmacSHA1").apply {
this.init(SecretKeySpec(secret.encodeToByteArray(), "HmacSHA1"))
}
private fun getMessage(url: String, msgpad: String): String {
return url.substring(0, 0xFF.coerceAtMost(url.length)) + msgpad
}
private fun getMessageDigest(s: String): String {
val signedMessage = synchronized(mac) { mac.doFinal(s.toByteArray()) }
return context.encodeBase64(signedMessage)
}
fun makeEncryptUrl(urlBuilder: HttpUrl.Builder) {
val msgPad = Calendar.getInstance().timeInMillis.toString()
val digest = getMessageDigest(getMessage(urlBuilder.build().toString(), msgPad))
urlBuilder.addQueryParameter("msgpad", msgPad).addQueryParameter("md", digest)
// .addEncodedQueryParameter("md", digest.urlEncoded())
}
}
private inner class MangaWebtoon(
val manga: Manga,
@JvmField val date: Long? = null,
@JvmField val readCount: Long? = null,
)
class German(context: MangaLoaderContext) : WebtoonsParser(context, MangaParserSource.WEBTOONS_DE)
}

@ -5,22 +5,25 @@ import kotlinx.coroutines.coroutineScope
import org.json.JSONArray
import org.jsoup.nodes.Document
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.PagedMangaParser
import org.koitharu.kotatsu.parsers.config.ConfigKey
import org.koitharu.kotatsu.parsers.core.PagedMangaParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.util.*
import java.util.*
internal abstract class AnimeBootstrapParser(
context: MangaLoaderContext,
source: MangaSource,
source: MangaParserSource,
domain: String,
pageSize: Int = 24,
) : PagedMangaParser(context, source, pageSize) {
override val configKeyDomain = ConfigKey.Domain(domain)
override val isMultipleTagsSupported = false
override fun onCreateConfig(keys: MutableCollection<ConfigKey<*>>) {
super.onCreateConfig(keys)
keys.add(userAgentKey)
}
override val availableSortOrders: Set<SortOrder> = EnumSet.of(
SortOrder.UPDATED,
@ -32,13 +35,27 @@ internal abstract class AnimeBootstrapParser(
protected open val listUrl = "/manga"
protected open val datePattern = "dd MMM. yyyy"
init {
paginator.firstPage = 1
searchPaginator.firstPage = 1
}
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
override val filterCapabilities: MangaListFilterCapabilities
get() = MangaListFilterCapabilities(
isSearchSupported = true,
isSearchWithFiltersSupported = true,
)
override suspend fun getFilterOptions() = MangaListFilterOptions(
availableTags = fetchAvailableTags(),
availableContentTypes = EnumSet.of(
ContentType.MANGA,
ContentType.MANHWA,
ContentType.MANHUA,
),
)
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val url = buildString {
append("https://")
append(domain)
@ -47,32 +64,37 @@ internal abstract class AnimeBootstrapParser(
append(page.toString())
append("&type=all")
when (filter) {
is MangaListFilter.Search -> {
append("&search=")
append(filter.query.urlEncoded())
}
is MangaListFilter.Advanced -> {
filter.tags.oneOrThrowIfMany()?.let {
append("&categorie=")
append(it.key)
}
filter.query?.let {
append("&search=")
append(filter.query.urlEncoded())
}
append("&sort=")
when (filter.sortOrder) {
SortOrder.POPULARITY -> append("view")
SortOrder.UPDATED -> append("updated")
SortOrder.ALPHABETICAL -> append("default")
SortOrder.NEWEST -> append("published")
else -> append("updated")
}
filter.tags.oneOrThrowIfMany()?.let {
append("&categorie=")
append(it.key)
}
}
filter.types.oneOrThrowIfMany()?.let {
append("&type=")
append(
when (it) {
ContentType.MANGA -> "manga"
ContentType.MANHWA -> "manhwa"
ContentType.MANHUA -> "manhua"
else -> "all"
},
)
}
null -> append("&sort=updated")
append("&sort=")
when (order) {
SortOrder.POPULARITY -> append("view")
SortOrder.UPDATED -> append("updated")
SortOrder.ALPHABETICAL -> append("default")
SortOrder.NEWEST -> append("published")
else -> append("updated")
}
}
val doc = webClient.httpGet(url).parseHtml()
@ -82,20 +104,21 @@ internal abstract class AnimeBootstrapParser(
id = generateUid(href),
url = href,
publicUrl = href.toAbsoluteUrl(div.host ?: domain),
coverUrl = div.selectFirstOrThrow("div.product__item__pic").attr("data-setbg").orEmpty(),
coverUrl = div.selectFirstOrThrow("div.product__item__pic")
.attrAsAbsoluteUrlOrNull("data-setbg"),
title = div.selectFirstOrThrow("div.product__item__text").text().orEmpty(),
altTitle = null,
altTitles = emptySet(),
rating = RATING_UNKNOWN,
tags = emptySet(),
author = null,
authors = emptySet(),
state = null,
source = source,
isNsfw = isNsfwSource,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
)
}
}
override suspend fun getAvailableTags(): Set<MangaTag> {
protected open suspend fun fetchAvailableTags(): Set<MangaTag> {
val doc = webClient.httpGet("https://$domain$listUrl").parseHtml()
return doc.select("div.product__page__filter div:contains(Genre:) option ").mapNotNullToSet { option ->
val key = option.attr("value") ?: return@mapNotNullToSet null
@ -124,7 +147,7 @@ internal abstract class AnimeBootstrapParser(
}
manga.copy(
tags = doc.body().select(selectTag).mapNotNullToSet { a ->
tags = doc.body().select(selectTag).mapToSet { a ->
MangaTag(
key = a.attr("href").substringAfterLast('='),
title = a.text().toTitleCase().replace(",", ""),
@ -145,8 +168,9 @@ internal abstract class AnimeBootstrapParser(
val href = a.attr("href")
MangaChapter(
id = generateUid(href),
name = a.text(),
number = i + 1,
title = a.text(),
number = i + 1f,
volume = 0,
url = href,
uploadDate = 0,
source = source,

@ -3,20 +3,20 @@ package org.koitharu.kotatsu.parsers.site.animebootstrap.fr
import kotlinx.coroutines.async
import kotlinx.coroutines.coroutineScope
import org.jsoup.nodes.Document
import org.koitharu.kotatsu.parsers.Broken
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.model.*
import org.koitharu.kotatsu.parsers.site.animebootstrap.AnimeBootstrapParser
import org.koitharu.kotatsu.parsers.util.*
import java.text.SimpleDateFormat
import java.util.EnumSet
import java.util.Locale
import java.util.*
@Broken
@MangaSourceParser("PAPSCAN", "PapScan", "fr")
internal class PapScan(context: MangaLoaderContext) :
AnimeBootstrapParser(context, MangaSource.PAPSCAN, "papscan.com") {
AnimeBootstrapParser(context, MangaParserSource.PAPSCAN, "papscan.com") {
override val sourceLocale: Locale = Locale.ENGLISH
override val isMultipleTagsSupported = false
override val listUrl = "/liste-manga"
override val selectState = "div.anime__details__widget li:contains(En cours)"
override val selectTag = "div.anime__details__widget li:contains(Genre) a"
@ -28,20 +28,20 @@ internal class PapScan(context: MangaLoaderContext) :
SortOrder.ALPHABETICAL_DESC,
)
override suspend fun getListPage(page: Int, filter: MangaListFilter?): List<Manga> {
override suspend fun getListPage(page: Int, order: SortOrder, filter: MangaListFilter): List<Manga> {
val url = buildString {
append("https://")
append(domain)
append("/filterList")
append("?page=")
append(page.toString())
when (filter) {
is MangaListFilter.Search -> {
when {
!filter.query.isNullOrEmpty() -> {
append("&alpha=")
append(filter.query.urlEncoded())
}
is MangaListFilter.Advanced -> {
else -> {
filter.tags.oneOrThrowIfMany()?.let {
append("&cat=")
@ -49,7 +49,7 @@ internal class PapScan(context: MangaLoaderContext) :
}
append("&sortBy=")
when (filter.sortOrder) {
when (order) {
SortOrder.POPULARITY -> append("views")
SortOrder.ALPHABETICAL_DESC -> append("name&asc=false")
SortOrder.ALPHABETICAL -> append("name&asc=true")
@ -57,8 +57,6 @@ internal class PapScan(context: MangaLoaderContext) :
}
}
null -> append("&sortBy=updated")
}
}
val doc = webClient.httpGet(url).parseHtml()
@ -68,22 +66,23 @@ internal class PapScan(context: MangaLoaderContext) :
id = generateUid(href),
url = href,
publicUrl = href.toAbsoluteUrl(div.host ?: domain),
coverUrl = div.selectFirstOrThrow("div.product__item__pic").attr("data-setbg").orEmpty(),
coverUrl = div.selectFirstOrThrow("div.product__item__pic")
.attrAsAbsoluteUrlOrNull("data-setbg"),
title = div.selectFirstOrThrow("div.product__item__text h5").text().orEmpty(),
altTitle = null,
altTitles = emptySet(),
rating = RATING_UNKNOWN,
tags = emptySet(),
author = null,
authors = emptySet(),
state = null,
source = source,
isNsfw = isNsfwSource,
contentRating = if (isNsfwSource) ContentRating.ADULT else null,
)
}
}
override suspend fun getAvailableTags(): Set<MangaTag> {
override suspend fun fetchAvailableTags(): Set<MangaTag> {
val doc = webClient.httpGet("https://$domain$listUrl").parseHtml()
return doc.select("a.category ").mapNotNullToSet { a ->
return doc.select("a.category ").mapToSet { a ->
val key = a.attr("href").substringAfterLast('=')
val name = a.text()
MangaTag(
@ -105,7 +104,7 @@ internal class PapScan(context: MangaLoaderContext) :
MangaState.ONGOING
}
manga.copy(
tags = doc.body().select(selectTag).mapNotNullToSet { a ->
tags = doc.body().select(selectTag).mapToSet { a ->
MangaTag(
key = a.attr("href").removeSuffix('/').substringAfterLast('/'),
title = a.text().toTitleCase(),
@ -125,10 +124,11 @@ internal class PapScan(context: MangaLoaderContext) :
val dateText = li.selectFirst("span.date-chapter-title-rtl")?.text()
MangaChapter(
id = generateUid(href),
name = li.selectFirstOrThrow("span em").text(),
number = i + 1,
title = li.selectFirstOrThrow("span em").text(),
number = i + 1f,
volume = 0,
url = href,
uploadDate = dateFormat.tryParse(dateText),
uploadDate = dateFormat.parseSafe(dateText),
source = source,
scanlator = null,
branch = null,

@ -2,9 +2,9 @@ package org.koitharu.kotatsu.parsers.site.animebootstrap.id
import org.koitharu.kotatsu.parsers.MangaLoaderContext
import org.koitharu.kotatsu.parsers.MangaSourceParser
import org.koitharu.kotatsu.parsers.model.MangaSource
import org.koitharu.kotatsu.parsers.model.MangaParserSource
import org.koitharu.kotatsu.parsers.site.animebootstrap.AnimeBootstrapParser
@MangaSourceParser("KOMIKZOID", "KomikzoId", "id")
internal class KomikzoId(context: MangaLoaderContext) :
AnimeBootstrapParser(context, MangaSource.KOMIKZOID, "komikzoid.id")
AnimeBootstrapParser(context, MangaParserSource.KOMIKZOID, "komikzoid.id")

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save