From f2416d02e37dd0ebfac04a0687b3a6bb6119bfc0 Mon Sep 17 00:00:00 2001 From: Kastang Date: Sat, 31 Dec 2016 21:03:44 -0500 Subject: [PATCH] implements basic support for #407 'gonewilder' functionality. When passing the -A flag to a url matching 'reddit.com/r' pattern, it will rip the submission author's content rather than the provided subreddit URL. Content will be saved in the same format as calling a reddit.com/u url directly --- src/main/java/com/rarchives/ripme/App.java | 19 ++++++++++ .../ripme/ripper/rippers/RedditRipper.java | 35 +++++++++++++++++++ 2 files changed, 54 insertions(+) diff --git a/src/main/java/com/rarchives/ripme/App.java b/src/main/java/com/rarchives/ripme/App.java index e6d22aac6..ad564d9b7 100644 --- a/src/main/java/com/rarchives/ripme/App.java +++ b/src/main/java/com/rarchives/ripme/App.java @@ -13,6 +13,8 @@ import java.util.List; import javax.swing.SwingUtilities; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.apache.commons.cli.BasicParser; import org.apache.commons.cli.CommandLine; @@ -167,6 +169,20 @@ public static void handleArguments(String[] args) { if (cl.hasOption('u')) { String url = cl.getOptionValue('u').trim(); + + // the -A option is limited to just reddit.com/r urls for the time being + // if the -A does not match a reddit.com/r regex, then ripme should fail out. + if(cl.hasOption('A')) { + Pattern p = Pattern.compile("^https?://(www\\.)?reddit.com/r/.*$"); + Matcher m = p.matcher(url); + + if (!m.matches()) { + logger.error("[!] 
Supplied URL does not meet -A requirements"); + System.exit(-1); + } + Utils.setConfigBoolean("download.rip_authors", true); + } + ripURL(url, cl.hasOption("n")); } } @@ -207,6 +223,9 @@ public static Options getOptions() { opts.addOption("l", "ripsdirectory", true, "Rips Directory (Default: ./rips)"); opts.addOption("n", "no-prop-file", false, "Do not create properties file."); opts.addOption("f", "urls-file", true, "Rip URLs from a file."); + + opts.addOption("A", "rip-authors", false, "REDDIT ONLY. Will rip all authors from a given subreddit."); + return opts; } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/RedditRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/RedditRipper.java index 067e7866a..386e093db 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/RedditRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/RedditRipper.java @@ -18,6 +18,9 @@ import com.rarchives.ripme.utils.RipUtils; import com.rarchives.ripme.utils.Utils; +import com.rarchives.ripme.ripper.AbstractRipper; + + public class RedditRipper extends AlbumRipper { public RedditRipper(URL url) throws IOException { @@ -34,6 +37,7 @@ public RedditRipper(URL url) throws IOException { //private static final String USER_AGENT = "ripme by /u/4_pr0n github.com/4pr0n/ripme"; private long lastRequestTime = 0; + private Boolean rip_authors = Utils.getConfigBoolean("download.rip_authors", false); @Override public boolean canRip(URL url) { @@ -59,6 +63,14 @@ private URL getJsonURL(URL url) throws MalformedURLException { @Override public void rip() throws IOException { + + // once we begin the rip process, clear download.rip_authors + // the value is preserved for this session and will not + // persist when getAndParseAndReturnNext spawns additional + // AbstractRipper processes + Utils.setConfigBoolean("download.rip_authors", false); + + URL jsonURL = getJsonURL(this.url); while (true) { jsonURL = getAndParseAndReturnNext(jsonURL); @@ -86,6 +98,29 @@ 
private URL getAndParseAndReturnNext(URL url) throws IOException { } children = data.getJSONArray("children"); for (int j = 0; j < children.length(); j++) { + + + if(rip_authors) { + JSONObject child_data = children.getJSONObject(j).getJSONObject("data"); + String author = child_data.getString("author"); + + logger.info("[OVERRIDE]: Ripping Author: " + author); + + // spawn a new AbstractRipper for the authors page + try { + URL author_url = new URL("http://reddit.com/user/" + author); + + AbstractRipper ripper = AbstractRipper.getRipper(author_url); + ripper.setup(); + ripper.rip(); + } catch(Exception e) { + logger.error("[!] AbstractRipper failed. Cannot continue."); + System.exit(-1); + } + + continue; + } + parseJsonChild(children.getJSONObject(j)); } if (data.has("after") && !data.isNull("after")) {