diff --git a/Pipfile.lock b/Pipfile.lock
index 3c20aa9..2a6ebfe 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -18,10 +18,10 @@
     "default": {
         "certifi": {
             "hashes": [
-                "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50",
-                "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef"
+                "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3",
+                "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"
             ],
-            "version": "==2019.9.11"
+            "version": "==2019.11.28"
         },
         "chardet": {
             "hashes": [
@@ -54,22 +54,20 @@
         },
         "pyyaml": {
             "hashes": [
-                "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9",
-                "sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4",
-                "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8",
-                "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696",
-                "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34",
-                "sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9",
-                "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73",
-                "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299",
-                "sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b",
-                "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae",
-                "sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681",
-                "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41",
-                "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8"
+                "sha256:0e7f69397d53155e55d10ff68fdfb2cf630a35e6daf65cf0bdeaf04f127c09dc",
+                "sha256:2e9f0b7c5914367b0916c3c104a024bb68f269a486b9d04a2e8ac6f6597b7803",
+                "sha256:35ace9b4147848cafac3db142795ee42deebe9d0dad885ce643928e88daebdcc",
+                "sha256:38a4f0d114101c58c0f3a88aeaa44d63efd588845c5a2df5290b73db8f246d15",
+                "sha256:483eb6a33b671408c8529106df3707270bfacb2447bf8ad856a4b4f57f6e3075",
+                "sha256:4b6be5edb9f6bb73680f5bf4ee08ff25416d1400fbd4535fe0069b2994da07cd",
+                "sha256:7f38e35c00e160db592091751d385cd7b3046d6d51f578b29943225178257b31",
+                "sha256:8100c896ecb361794d8bfdb9c11fce618c7cf83d624d73d5ab38aef3bc82d43f",
+                "sha256:c0ee8eca2c582d29c3c2ec6e2c4f703d1b7f1fb10bc72317355a746057e7346c",
+                "sha256:e4c015484ff0ff197564917b4b4246ca03f411b9bd7f16e02a2f586eb48b6d04",
+                "sha256:ebc4ed52dcc93eeebeae5cf5deb2ae4347b3a81c3fa12b0b8c976544829396a4"
             ],
             "index": "pypi",
-            "version": "==5.1.2"
+            "version": "==5.2"
         },
         "requests": {
             "hashes": [
@@ -80,10 +78,10 @@
         },
         "six": {
             "hashes": [
-                "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
-                "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+                "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd",
+                "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"
             ],
-            "version": "==1.12.0"
+            "version": "==1.13.0"
         },
         "update-checker": {
             "hashes": [
@@ -94,10 +92,10 @@
         },
         "urllib3": {
             "hashes": [
-                "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398",
-                "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86"
+                "sha256:a8a318824cc77d1fd4b2bec2ded92646630d7fe8619497b142c84a9e6f5a7293",
+                "sha256:f3c5fd51747d450d4dcf6f923c81f78f811aab8205fda64b0aba34a4e48b0745"
             ],
-            "version": "==1.25.6"
+            "version": "==1.25.7"
         },
         "websocket-client": {
             "hashes": [
diff --git a/riddle.py b/riddle.py
old mode 100644
new mode 100755
index 900ae9d..73ec070
--- a/riddle.py
+++ b/riddle.py
@@ -10,7 +10,7 @@ import optparse
 import zipfile
 import urllib.request as urlreq
 
-user_agent = 'python:riddle:3.0 (by u/Trivernis)'  # the reddit api user-agent
+user_agent = 'linux:riddle:3.0 (by u/Trivernis)'  # the reddit api user-agent
 img_ext = ['jpg', 'jpeg', 'png']  # default used extensions to filter for images
 min_size = 5  # minimum size in kilobytes. changeable in settings
@@ -161,8 +161,14 @@ def get_images(reddit_client: praw.Reddit, subreddit: str, limit: int, nsfw: boo
     :return: list of images
     """
     print('[~] Fetching images for r/%s...' % subreddit)
-    urls = [submission.url for submission in reddit_client.subreddit(subreddit).hot(limit=limit)
-            if not submission.over_18 or nsfw]  # fetches hot images and filters nsfw if set to false
+    urls = []
+    try:
+        for submission in reddit_client.subreddit(subreddit).hot(limit=limit):
+            if not submission.over_18 or nsfw:
+                urls.append(submission.url)
+                print('\r[~] %s images' % len(urls), end='\r')
+    except Exception as e:
+        print(e)
     return [url for url in urls if url.split('.')[-1] in img_ext]
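
For context, the get_images() change above replaces a single list comprehension with a loop wrapped in try/except, so one API failure (e.g. a banned or private subreddit) no longer aborts the run, and an inline counter reports progress while URLs are collected. The standalone sketch below mirrors that pattern under stated assumptions: it is not part of the repository, the function name fetch_hot_image_urls and the placeholder credentials are invented for illustration, and only documented PRAW calls (praw.Reddit, subreddit().hot(), submission.over_18, submission.url) are used.

# Hypothetical, self-contained sketch of the new fetching pattern in riddle.py.
import praw

img_ext = ['jpg', 'jpeg', 'png']  # same image-extension filter as riddle.py


def fetch_hot_image_urls(reddit_client: praw.Reddit, subreddit: str, limit: int, nsfw: bool = False) -> list:
    urls = []
    try:
        # iterate the hot listing; a single exception here no longer loses the whole run
        for submission in reddit_client.subreddit(subreddit).hot(limit=limit):
            if not submission.over_18 or nsfw:  # skip NSFW posts unless explicitly allowed
                urls.append(submission.url)
                print('\r[~] %s images' % len(urls), end='\r')  # inline progress counter
    except Exception as e:
        print(e)  # report the error and return whatever was collected so far
    return [url for url in urls if url.split('.')[-1] in img_ext]


if __name__ == '__main__':
    # placeholder credentials; a real run needs a registered reddit script app
    client = praw.Reddit(client_id='<client-id>', client_secret='<client-secret>',
                         user_agent='linux:riddle:3.0 (by u/Trivernis)')
    print(fetch_hot_image_urls(client, 'EarthPorn', limit=10))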