Commit ba68e70

Author: updating-bot
Committed: mirroring bot - 2025/10/03
1 parent b0215a3, commit ba68e70

28 files changed: +991 -700 lines changed

svn_trunk/src/jd/controlling/linkcrawler/LinkCrawlerDeepInspector.java

Lines changed: 12 additions & 9 deletions

@@ -4,6 +4,13 @@
 import java.util.List;
 import java.util.Set;
 
+import jd.controlling.linkcrawler.LinkCrawler.LinkCrawlerGeneration;
+import jd.http.Browser;
+import jd.http.URLConnectionAdapter;
+import jd.plugins.DownloadConnectionVerifier;
+import jd.plugins.Plugin;
+import jd.plugins.PluginForHost;
+
 import org.appwork.net.protocol.http.HTTPConstants;
 import org.appwork.utils.Regex;
 import org.appwork.utils.StringUtils;
@@ -14,13 +21,6 @@
 import org.jdownloader.plugins.controller.host.LazyHostPlugin;
 import org.jdownloader.plugins.controller.host.PluginFinder;
 
-import jd.controlling.linkcrawler.LinkCrawler.LinkCrawlerGeneration;
-import jd.http.Browser;
-import jd.http.URLConnectionAdapter;
-import jd.plugins.DownloadConnectionVerifier;
-import jd.plugins.Plugin;
-import jd.plugins.PluginForHost;
-
 public abstract class LinkCrawlerDeepInspector {
     /**
      * https://www.iana.org/assignments/media-types/media-types.xhtml
@@ -53,8 +53,11 @@ public boolean looksLikeDownloadableContent(final URLConnectionAdapter urlConnec
         final String contentDispositionHeader = urlConnection.getHeaderField(HTTPConstants.HEADER_RESPONSE_CONTENT_DISPOSITION);
         final String contentDispositionFileName = HTTPConnectionUtils.getFileNameFromDispositionHeader(contentDispositionHeader);
         final boolean inlineFlag = contentDispositionHeader.matches("(?i)^\\s*inline\\s*;?.*");
-        if (inlineFlag && (contentDispositionFileName != null && !contentDispositionFileName.matches("(?i)^.*\\.html?$") && hasContentType && isHtmlContent(urlConnection))) {
-            /* Filename is not a .html file but we got html code */
+        if (inlineFlag && (contentDispositionFileName != null && contentDispositionFileName.matches("(?i)^.*\\.html?$") || (hasContentType && isHtmlContent(urlConnection)))) {
+            // HTTP/1.1 200 OK
+            // Content-Type: text/html;
+            // Content-Disposition: inline; filename=error.html
+            // We don't want to download content-disposition inline html.
             looksLike = false;
         } else {
             looksLike = true;
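
Note: the reworked condition above treats a response as non-downloadable when it is served with Content-Disposition: inline and is recognizably HTML, either by its disposition filename or by its Content-Type. A minimal sketch of the two regex checks in isolation, using plain java.lang.String.matches; the header values below are invented for illustration, only the patterns are taken from the hunk above:

// Hedged sketch: hypothetical header values, same patterns as in LinkCrawlerDeepInspector.
final String contentDispositionHeader = "inline; filename=error.html";
final String contentDispositionFileName = "error.html";
final boolean inlineFlag = contentDispositionHeader.matches("(?i)^\\s*inline\\s*;?.*");   // true
final boolean htmlFileName = contentDispositionFileName.matches("(?i)^.*\\.html?$");      // true
// inline + HTML filename (or HTML Content-Type) -> looksLike = false, i.e. not treated as a download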

svn_trunk/src/jd/plugins/DecrypterRetryException.java

Lines changed: 3 additions & 0 deletions

@@ -7,6 +7,7 @@ public static enum RetryReason {
     CAPTCHA(_JDT.T.decrypter_wrongcaptcha()),
     EMPTY_FOLDER(_JDT.T.decrypter_empty_folder()),
     EMPTY_PROFILE(_JDT.T.decrypter_empty_profile()),
+    EMPTY_SEARCH_QUERY("Search query without search results"),
     NO_ACCOUNT(_JDT.T.decrypter_invalidaccount()),
     FILE_NOT_FOUND(_JDT.T.decrypter_contentoffline()),
     PLUGIN_DEFECT(_JDT.T.decrypter_plugindefect()),
@@ -48,6 +49,8 @@ public String getComment() {
         /* Return default comment for some states. */
         if (this.reason == RetryReason.EMPTY_FOLDER) {
             return _JDT.T.decrypter_empty_folder_description();
+        } else if (this.reason == RetryReason.EMPTY_SEARCH_QUERY) {
+            return "The search query you've entered did not lead to any search results.";
         } else if (this.reason == RetryReason.GEO) {
             return _JDT.T.decrypter_unavailable_geo_description();
         } else if (this.reason == RetryReason.PLUGIN_SETTINGS) {
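
Note: the new EMPTY_SEARCH_QUERY constant is intended to be thrown by crawler plugins when a user-supplied search returns nothing; the KemonoPartyCrawler hunk further down already carries the intended call as a commented-out TODO. A hedged sketch of that intended usage, assuming the single-argument DecrypterRetryException(RetryReason) constructor used there:

// Hedged sketch of the intended call site (see the TODO in KemonoPartyCrawler below).
if (searchResults.isEmpty()) {
    // getComment() then yields "The search query you've entered did not lead to any search results."
    throw new DecrypterRetryException(RetryReason.EMPTY_SEARCH_QUERY);
}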

svn_trunk/src/jd/plugins/Plugin.java

Lines changed: 4 additions & 0 deletions

@@ -886,6 +886,10 @@ public static enum PluginEnvironment {
         LINK_CHECK,
         ACCOUNT_CHECK;
 
+        public boolean isCurrentPluginEnvironment() {
+            return this == getPluginEnvironment();
+        }
+
         public static PluginEnvironment getPluginEnvironment() {
             final Thread thread = Thread.currentThread();
             if (thread instanceof SingleDownloadController) {
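
Note: the added helper is a convenience around the existing static lookup; an enum constant can now ask whether it matches the environment derived from the current thread. A hedged usage sketch using LINK_CHECK, which is visible in the hunk above (the surrounding condition is illustrative only):

// Equivalent to: PluginEnvironment.getPluginEnvironment() == PluginEnvironment.LINK_CHECK
if (Plugin.PluginEnvironment.LINK_CHECK.isCurrentPluginEnvironment()) {
    // running inside a link check thread
}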

svn_trunk/src/jd/plugins/decrypter/KemonoPartyCrawler.java

Lines changed: 52 additions & 37 deletions

@@ -25,6 +25,20 @@
 import java.util.Map;
 import java.util.Set;
 
+import org.appwork.net.protocol.http.HTTPConstants;
+import org.appwork.storage.TypeRef;
+import org.appwork.utils.DebugMode;
+import org.appwork.utils.Files;
+import org.appwork.utils.Regex;
+import org.appwork.utils.StringUtils;
+import org.appwork.utils.parser.UrlQuery;
+import org.jdownloader.controlling.filter.CompiledFiletypeFilter;
+import org.jdownloader.plugins.components.config.KemonoPartyConfig;
+import org.jdownloader.plugins.components.config.KemonoPartyConfig.TextCrawlMode;
+import org.jdownloader.plugins.components.config.KemonoPartyConfigCoomerParty;
+import org.jdownloader.plugins.config.PluginJsonConfig;
+import org.jdownloader.plugins.controller.LazyPlugin;
+
 import jd.PluginWrapper;
 import jd.controlling.ProgressController;
 import jd.controlling.linkcrawler.CrawledLink;
@@ -43,21 +57,7 @@
 import jd.plugins.PluginForDecrypt;
 import jd.plugins.hoster.KemonoParty;
 
-import org.appwork.net.protocol.http.HTTPConstants;
-import org.appwork.storage.TypeRef;
-import org.appwork.utils.DebugMode;
-import org.appwork.utils.Files;
-import org.appwork.utils.Regex;
-import org.appwork.utils.StringUtils;
-import org.appwork.utils.parser.UrlQuery;
-import org.jdownloader.controlling.filter.CompiledFiletypeFilter;
-import org.jdownloader.plugins.components.config.KemonoPartyConfig;
-import org.jdownloader.plugins.components.config.KemonoPartyConfig.TextCrawlMode;
-import org.jdownloader.plugins.components.config.KemonoPartyConfigCoomerParty;
-import org.jdownloader.plugins.config.PluginJsonConfig;
-import org.jdownloader.plugins.controller.LazyPlugin;
-
-@DecrypterPlugin(revision = "$Revision: 51471 $", interfaceVersion = 3, names = {}, urls = {})
+@DecrypterPlugin(revision = "$Revision: 51615 $", interfaceVersion = 3, names = {}, urls = {})
 public class KemonoPartyCrawler extends PluginForDecrypt {
     public KemonoPartyCrawler(PluginWrapper wrapper) {
         super(wrapper);
@@ -107,12 +107,12 @@ public static String[] getAnnotationUrls() {
     public static String[] buildAnnotationUrls(final List<String[]> pluginDomains) {
         final List<String> ret = new ArrayList<String>();
         for (final String[] domains : pluginDomains) {
-            ret.add("https?://(?:www\\.)?" + buildHostsPatternPart(domains) + "/[^/]+/user/([\\w\\-\\.]+(\\?o=(\\d+))?)(/post/[a-z0-9]+)?");
+            ret.add("https?://(?:www\\.)?" + buildHostsPatternPart(domains) + "/[^/]+/user/([\\w\\-\\.]+(\\?.+)?)(/post/[a-z0-9]+)?");
         }
         return ret.toArray(new String[0]);
     }
 
-    private final String TYPE_PROFILE = "(?i)(?:https?://[^/]+)?/([^/]+)/user/([\\w\\-\\.]+)(\\?o=(\\d+))?$";
+    private final String TYPE_PROFILE = "(?i)(?:https?://[^/]+)?/([^/]+)/user/([\\w\\-\\.]+)(\\?.+)?$";
     private final String TYPE_POST = "(?i)(?:https?://[^/]+)?/([^/]+)/user/([\\w\\-\\.]+)/post/([a-z0-9]+)$";
     private KemonoParty hostPlugin = null;
     private CryptedLink cl = null;
@@ -150,19 +150,15 @@ private ArrayList<DownloadLink> crawlProfile(final CryptedLink param) throws Exc
         }
         final String service = urlinfo.getMatch(0);
         final String userID = urlinfo.getMatch(1);
-        final String startOffsetStr = urlinfo.getMatch(3);
-        Integer startOffset = null;
-        if (startOffsetStr != null) {
-            startOffset = Integer.parseInt(startOffsetStr);
-        }
-        return crawlProfileAPI(service, userID, startOffset);
+        final UrlQuery query = UrlQuery.parse(urlinfo.getMatch(2));
+        return crawlProfileAPI(service, userID, query);
     }
 
    /**
     * @param startOffset
    *            : If provided, only this offset/page will be crawled.
    */
-    private ArrayList<DownloadLink> crawlProfileAPI(final String service, final String usernameOrUserID, final Integer startOffset) throws Exception {
+    private ArrayList<DownloadLink> crawlProfileAPI(final String service, final String usernameOrUserID, final UrlQuery query) throws Exception {
        if (service == null || usernameOrUserID == null) {
            /* Developer mistake */
            throw new PluginException(LinkStatus.ERROR_PLUGIN_DEFECT);
@@ -173,25 +169,42 @@ private ArrayList<DownloadLink> crawlProfileAPI(final String service, final Stri
         final ArrayList<DownloadLink> ret = new ArrayList<DownloadLink>();
         final FilePackage profileFilePackage = getFilePackageForProfileCrawler(service, usernameOrUserID);
         int offset = 0;
-        if (startOffset != null) {
-            logger.info("Starting from offset: " + startOffset);
-            offset = startOffset.intValue();
+        String offsetString = null;
+        String qString = null;
+        if (query != null) {
+            qString = query.get("q");
+            offsetString = query.getDecoded("o");
+            if (offsetString != null && offsetString.matches("^\\d+$")) {
+                logger.info("Starting from offset: " + offsetString);
+                offset = Integer.parseInt(offsetString);
+            }
+        }
+        if (qString == null) {
+            qString = "";
+        } else {
+            qString = "&q=" + qString;
         }
         int page = 1;
         final int maxItemsPerPage = 50;
         int numberofContinuousPagesWithoutAnyNewItems = 0;
         final int maxPagesWithoutNewItems = 15;
         final Set<String> retryWithSinglePostAPI = new HashSet<String>();
-        do {
-            getPage(br, this.getApiBase() + "/" + service + "/user/" + Encoding.urlEncode(usernameOrUserID) + "/posts?o=" + offset);
+        pagination: do {
+            getPage(br, this.getApiBase() + "/" + service + "/user/" + Encoding.urlEncode(usernameOrUserID) + "/posts?o=" + offset + qString);
             final List<Map<String, Object>> posts = (List<Map<String, Object>>) restoreFromString(br.getRequest().getHtmlCode(), TypeRef.OBJECT);
             if (posts == null || posts.isEmpty()) {
                 if (ret.isEmpty() && retryWithSinglePostAPI.isEmpty()) {
+                    if (!StringUtils.isEmpty(qString)) {
+                        // TODO: Use exception down below once RetryReason.EMPTY_SEARCH_QUERY is live.
+                        // throw new DecrypterRetryException(RetryReason.EMPTY_SEARCH_QUERY);
+                        logger.info("Stopping because: Search query revealed zero results");
+                        break pagination;
+                    }
                     throw new PluginException(LinkStatus.ERROR_FILE_NOT_FOUND);
                 } else {
                     /* This should never happen */
                     logger.info("Stopping because: Got empty page");
-                    break;
+                    break pagination;
                 }
             }
             final int numberofUniqueItemsOld = dupes.size();
@@ -227,16 +240,16 @@ private ArrayList<DownloadLink> crawlProfileAPI(final String service, final Stri
             }
             if (this.isAbort()) {
                 logger.info("Stopping because: Aborted by user");
-                break;
-            } else if (startOffset != null) {
-                logger.info("Stopping because: User provided specific offset to crawl: " + startOffset);
-                break;
+                break pagination;
+            } else if (StringUtils.isNotEmpty(offsetString)) {
+                logger.info("Stopping because: User provided specific offset to crawl: " + offsetString);
+                break pagination;
             } else if (numberofContinuousPagesWithoutAnyNewItems >= maxPagesWithoutNewItems) {
                 logger.info("Stopping because: Too many pages without any new items: " + maxPagesWithoutNewItems);
-                break;
+                break pagination;
             } else if (posts.size() < maxItemsPerPage) {
                 logger.info("Stopping because: Reached last page(?) Page: " + page);
-                break;
+                break pagination;
             } else {
                 /* Continue to next page */
                 offset += posts.size();
@@ -483,7 +496,9 @@ protected boolean removeEldestEntry(Map.Entry<String, String> eldest) {
     };
 
    /**
-     * Returns userID for given username. </br> Uses API to find userID. </br> Throws Exception if it is unable to find userID.
+     * Returns userID for given username. </br>
+     * Uses API to find userID. </br>
+     * Throws Exception if it is unable to find userID.
    */
    private String findUsername(final String service, final String usernameOrUserID) throws Exception {
        synchronized (ID_TO_USERNAME) {
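
Note: the profile crawler now forwards the whole query string instead of only a numeric ?o= offset, so both the page offset (o) and the search term (q) survive into crawlProfileAPI. A hedged, isolated sketch of that parsing step, reusing the UrlQuery calls from the hunks above on a made-up query string (.../<service>/user/<user>?o=50&q=term):

// Hedged sketch: sample query string is invented; UrlQuery calls match the diff above.
final UrlQuery query = UrlQuery.parse("?o=50&q=term");
final String qString = query.get("q");             // "term" -> appended to the API call as "&q=term"
final String offsetString = query.getDecoded("o"); // "50"
int offset = 0;
if (offsetString != null && offsetString.matches("^\\d+$")) {
    offset = Integer.parseInt(offsetString);       // pagination starts at the user-supplied offset
}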

svn_trunk/src/jd/plugins/decrypter/SankakucomplexComCrawler.java

Lines changed: 19 additions & 33 deletions

@@ -25,7 +25,7 @@
 import jd.controlling.AccountController;
 import jd.controlling.ProgressController;
 import jd.http.Browser;
-import jd.http.requests.GetRequest;
+import jd.http.Request;
 import jd.nutils.encoding.Encoding;
 import jd.parser.Regex;
 import jd.plugins.Account;
@@ -39,7 +39,6 @@
 import jd.plugins.PluginForDecrypt;
 import jd.plugins.hoster.SankakucomplexCom;
 
-import org.appwork.net.protocol.http.HTTPConstants;
 import org.appwork.storage.TypeRef;
 import org.appwork.utils.StringUtils;
 import org.appwork.utils.encoding.URLEncode;
@@ -49,18 +48,12 @@
 import org.jdownloader.plugins.config.PluginJsonConfig;
 import org.jdownloader.plugins.controller.LazyPlugin;
 
-@DecrypterPlugin(revision = "$Revision: 51605 $", interfaceVersion = 3, names = {}, urls = {})
+@DecrypterPlugin(revision = "$Revision: 51610 $", interfaceVersion = 3, names = {}, urls = {})
 public class SankakucomplexComCrawler extends PluginForDecrypt {
     public SankakucomplexComCrawler(PluginWrapper wrapper) {
         super(wrapper);
     }
 
-    /*
-     * 2025-04-22: Looks like API doesn't work anymore or at least it fails for a lot of items so for now let's always prefer website in
-     * auto mode.
-     */
-    private static final boolean ACCESS_MODE_AUTO_PREFER_API_MODE = false;
-
     @Override
     public LazyPlugin.FEATURE[] getFeatures() {
         return new LazyPlugin.FEATURE[] { LazyPlugin.FEATURE.IMAGE_GALLERY, LazyPlugin.FEATURE.BUBBLE_NOTIFICATION };
@@ -118,7 +111,7 @@ public static String[] buildAnnotationUrls(final List<String[]> pluginDomains) {
     private static final Pattern TYPE_TAGS_BOOKS = Pattern.compile("/(([a-z]{2,3})/?)?books\\?tags=([^&]+)", Pattern.CASE_INSENSITIVE);
     private static final Pattern TYPE_TAGS_POSTS = Pattern.compile("/(([a-z]{2,3})/?)?(?:posts)?\\?tags=([^&]+)", Pattern.CASE_INSENSITIVE);
 
-    public static final String API_BASE_NEW = "https://sankakuapi.com";
+    public static final String API_BASE = "https://sankakuapi.com";
     private SankakucomplexCom hosterplugin = null;
 
     @Override
@@ -128,9 +121,9 @@ public void clean() {
     }
 
     public ArrayList<DownloadLink> decryptIt(final CryptedLink param, ProgressController progress) throws Exception {
+        hosterplugin = (SankakucomplexCom) this.getNewPluginForHostInstance(this.getHost());
         final Account account = AccountController.getInstance().getValidAccount(this.getHost());
         if (account != null) {
-            hosterplugin = (SankakucomplexCom) this.getNewPluginForHostInstance(this.getHost());
             hosterplugin.login(account, false);
         }
         final String contenturl = param.getCryptedUrl();
@@ -164,7 +157,7 @@ private ArrayList<DownloadLink> crawlTagsPosts(final CryptedLink param, final Ac
         * Some items are only visible for logged in users and are never returned via API. </br> For this reason, some user may prefer
         * website mode.
         */
-        if (mode == AccessMode.API || (mode == AccessMode.AUTO && ACCESS_MODE_AUTO_PREFER_API_MODE)) {
+        if (mode == AccessMode.API || (mode == AccessMode.AUTO && hosterplugin.allowUseAPI(account, SankakucomplexCom.API_METHOD.OTHER))) {
            return crawlTagsPostsAPI(account, param, tags, language);
        } else {
            return crawlTagsPostsWebsite(account, param, tags, language);
@@ -356,12 +349,10 @@ private ArrayList<DownloadLink> crawlTagsPostsAPI(final Account account, final C
         int page = 1;
         int position = 1;
         pagination: do {
-            if (true) {
-                // unfinished API stuff
-                throw new PluginException(LinkStatus.ERROR_PLUGIN_DEFECT);
-            }
-            br.getPage(API_BASE_NEW + "/posts/keyset?" + query.toString());
-            final Map<String, Object> entries = restoreFromString(br.getRequest().getHtmlCode(), TypeRef.MAP);
+            final Browser brc = createNewBrowserInstance();
+            final Request request = hosterplugin.addAPIToken(brc.createGetRequest(API_BASE + "/posts/keyset?" + query.toString()), account);
+            brc.getPage(request);
+            final Map<String, Object> entries = restoreFromString(brc.getRequest().getHtmlCode(), TypeRef.MAP);
             final Map<String, Object> meta = (Map<String, Object>) entries.get("meta");
             final String nextPageHash = (String) meta.get("next");
             final List<Map<String, Object>> data = (List<Map<String, Object>>) entries.get("data");
@@ -416,9 +407,12 @@ private ArrayList<DownloadLink> crawlTagsBooksAPI(final CryptedLink param, final
             return ret;
         }
         final Regex urlinfo = new Regex(param.getCryptedUrl(), TYPE_TAGS_BOOKS);
-        final String languageFromURL = urlinfo.getMatch(1);
+        String languageFromURL = urlinfo.getMatch(1);
+        if (languageFromURL == null) {
+            languageFromURL = "en";
+        }
         String tags = urlinfo.getMatch(2);
-        if (languageFromURL == null || tags == null) {
+        if (tags == null) {
            /* Developer mistake */
            throw new PluginException(LinkStatus.ERROR_PLUGIN_DEFECT);
        }
@@ -432,12 +426,10 @@ private ArrayList<DownloadLink> crawlTagsBooksAPI(final CryptedLink param, final
         query.appendEncoded("pool_type", "0");
         int page = 1;
         pagination: do {
-            if (true) {
-                // unfinished API stuff
-                throw new PluginException(LinkStatus.ERROR_PLUGIN_DEFECT);
-            }
-            br.getPage(API_BASE_NEW + "/pools/keyset?" + query.toString());
-            final Map<String, Object> entries = restoreFromString(br.getRequest().getHtmlCode(), TypeRef.MAP);
+            final Browser brc = createNewBrowserInstance();
+            final Request request = hosterplugin.addAPIToken(brc.createGetRequest(API_BASE + "/pools/keyset?" + query.toString()), account);
+            brc.getPage(request);
+            final Map<String, Object> entries = restoreFromString(brc.getRequest().getHtmlCode(), TypeRef.MAP);
             final Map<String, Object> meta = (Map<String, Object>) entries.get("meta");
             final String nextPageHash = (String) meta.get("next");
             final List<Map<String, Object>> data = (List<Map<String, Object>>) entries.get("data");
@@ -495,13 +487,7 @@ private ArrayList<DownloadLink> crawlBook(final CryptedLink param, final Account
         final ArrayList<DownloadLink> ret = new ArrayList<DownloadLink>();
         final Browser brc = this.createNewBrowserInstance();
         brc.setAllowedResponseCodes(400);
-        final GetRequest request = brc.createGetRequest(SankakucomplexComCrawler.API_BASE_NEW + "/pools/" + bookID + "?lang=" + language + "&includes[]=series&exceptStatuses[]=deleted");
-        if (account != null) {
-            final String accessToken = hosterplugin.getAPIToken(account);
-            request.getHeaders().put(HTTPConstants.HEADER_REQUEST_ORIGIN, "https://www.sankakucomplex.com");
-            request.getHeaders().put(HTTPConstants.HEADER_REQUEST_REFERER, "https://www.sankakucomplex.com/");
-            request.getHeaders().put(HTTPConstants.HEADER_REQUEST_AUTHORIZATION, "Bearer " + accessToken);
-        }
+        final Request request = hosterplugin.addAPIToken(brc.createGetRequest(SankakucomplexComCrawler.API_BASE + "/pools/" + bookID + "?lang=" + language + "&includes[]=series&exceptStatuses[]=deleted"), account);
         brc.getPage(request);
         if (brc.getHttpConnection().getResponseCode() == 404) {
             throw new PluginException(LinkStatus.ERROR_FILE_NOT_FOUND);
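
Note: across this file, the manual Origin/Referer/Authorization header setup has been folded into a single hosterplugin.addAPIToken(request, account) call, used identically for the posts, pools and book endpoints. A hedged sketch of that shared pattern with a placeholder endpoint path; only createGetRequest, addAPIToken and getPage appear in the hunks above, and the behavior for a null account is an assumption:

final Browser brc = createNewBrowserInstance();
// addAPIToken attaches the account's API token to the request (signature as used above);
// presumably it leaves the request unchanged when account is null (assumption, not confirmed by this commit).
final Request request = hosterplugin.addAPIToken(brc.createGetRequest(API_BASE + "/some/endpoint?" + query.toString()), account);
brc.getPage(request);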
