Compare commits: master...syndicatio (30 commits)
| SHA1 |
|---|
| 0ae9d3ddd7 |
| 84e647a390 |
| 9fea166b94 |
| 2ff8027d34 |
| 4c16afebf9 |
| 4e4f2f2a7a |
| 9b2e9ed74d |
| db14ea6b3d |
| eec7bc5fff |
| 0e80b8220a |
| 9fea7d8275 |
| 57bd52e537 |
| 0eb134136f |
| fd96708790 |
| 0bccc33730 |
| de78ad42b8 |
| 9fbdf2a8a7 |
| 26b7d47996 |
| 7d474af899 |
| 3bf37386fc |
| e4af5bbd6a |
| 972165018c |
| 8a723c2bc8 |
| f2afe19bc9 |
| 10740cae92 |
| d8c4cb47fa |
| 9905a15ace |
| 427d4ff972 |
| d9e998750c |
| 0b8b5ce498 |
**README.md**

```diff
@@ -7,9 +7,9 @@ Simple and stylish text-to-html microblog generator.
 
     python3 dateutil toml make curl pycurl urllib
 
-* `dateutil`, `toml` are Python modules.
+* `dateutil`, `toml`, `json`, `pycurl`, `hashlib` are Python modules.
 * `make` (optional), method for invoking the script.
-* `curl`, `pycurl` and `urllib` (optional), for uploading multiple files to neocities (`neouploader.py`).
+* `urllib` (optional), for uploading multiple files to neocities (`neouploader.py`).
 
 ### Usage
 
```
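The revised list leans on `json` and `hashlib` (both standard library) plus `pycurl` for the new webring fetches, while plain `curl` drops out. A quick, illustrative way to check a local environment against the new list (not part of the repository):

```python
# Minimal sanity check for the modules the updated README lists.
# json, hashlib, urllib ship with Python; dateutil, toml, pycurl do not.
import importlib

for name in ("dateutil", "toml", "json", "pycurl", "hashlib", "urllib"):
    try:
        importlib.import_module(name)
        print("ok:", name)
    except ImportError as e:
        print("missing:", name, "-", e)
```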
**makefile**

```diff
@@ -1,22 +1,23 @@
 
 
-all: template.tpl content.txt timeline.css
+all: demo tpl css settings
 	python microblog.py ./template.tpl ./content.txt > result.html
 
 # for people who don't want to read the README
 # and want to hit `make` to see how things work.
-template.tpl:
+tpl:
 	cp ./example/default.tpl ./template.tpl
 
-timeline.css:
+css:
 	cp ./example/timeline.css ./timeline.css
 
-content.txt:
+demo:
 	cp ./example/demo.txt ./content.txt
 
+settings:
+	cp ./example/settings.toml ./settings.toml
+
 .PHONY: clean
 clean:
 	rm ./pages/*.html
 	rm ./tags/*/*.html
 	rm lastfullpage.txt
-	rmdir ./pages ./tags/* ./tags
+	rm ./webring/*.html
+	rmdir ./pages ./tags/* ./tags ./webring
```
**settings.toml**

```diff
@@ -1,4 +1,5 @@
-latestpage="result.html"
+# latestpage="result.html"
+latestpages=["meta.json", "result.html"]
 
 [page]
 postsperpage = 20
@@ -28,3 +29,37 @@ interact = "https://yoursite.tld/cgi?postid="
 [post.gallery]
 path_to_thumb="./thumbs"
 path_to_fullsize="./images"
+
+[webring]
+enabled=false
+file_output="meta.json"
+
+[webring.profile]
+username="Your name here"
+url="https://yourdomain.tld/microblog/"
+avatar="https://yourdomain.tld/microblog/images/avatar.jpg"
+short_bio= "Your self-description. Anything longer than 150 characters is truncated."
+
+[webring.following]
+list= ["https://likho.neocities.org/microblog/meta.json"]
+format="""
+<div class="fill">
+    <div class="postcell">
+        <img src="{__avatar__}" alt="Avatar" class="avatar">
+        <span class="wrapper">
+            <div class="handle">
+                <a href="{__url__}">{__handle__}</a>
+            </div>
+            <div class="last-updated">Last Update: {__lastupdated__}</div>
+            <span class="post-count">Posts: {__post_count__}</span>
+        </span>
+        <p class="short-bio">{__shortbio__}</p>
+    </div>
+</div>
+"""
+
+# internally link avatars - avoids hotlinks
+[webring.following.internal-avatars]
+enabled=false
+path_to_avatars="/microblog/avatars" # link rendered on page
+local_path_to_avatars="./avatars" # destination folder on pc
```
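For context, `file_output` above names the JSON card this site publishes, and each URL in `[webring.following] list` points at the same kind of file on a peer's site. Judging from `export_profile` and `render` in the `microblog.py` diff below, a peer's `meta.json` would need roughly this shape (all values here are invented for illustration; note the reader expects hyphenated keys):

```python
import json

# Invented example of a peer's meta.json; the keys are the ones
# render() in microblog.py reads before formatting a profile card.
peer_profile = {
    "username": "likho",
    "url": "https://likho.neocities.org/microblog/",
    "avatar": "https://likho.neocities.org/microblog/images/avatar.jpg",
    "short-bio": "Example bio; render() truncates past 150 characters.",
    "post-count": 42,            # computed from the parsed post list
    "last-updated": 1600000000,  # Unix epoch of the newest post
}
print(json.dumps(peer_profile))
```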
**timeline.css**

```diff
@@ -31,28 +31,29 @@
     color: green;
     font-weight: bold;
 }
-.profile {
-    vertical-align: middle;
-    padding-left: 10px;
-    border:1px solid blue;
-}
 .avatar {
     vertical-align: middle;
     width: 50px;
     height: 50px;
 }
-.handle{
+.column .profile {
+    vertical-align: middle;
+    padding-left: 10px;
+    padding:1%;
+    border:1px solid blue;
+}
+.column .profile .handle{
     font-size: 1.1em;
     font-weight: bold;
 }
-.email{
-    text-align:left;
+.column .profile .email{
     font-size: 0.8em;
+    text-align:left;
     text-decoration:none;
 }
-.bio {
-    vertical-align: middle;
+.column .profile .bio {
     font-size: 0.9em;
+    vertical-align: middle;
     margin: 1em
 }
 .gallery {
@@ -73,6 +74,28 @@
     border: 1px solid #777;
     filter: invert(100%);
 }
+.postcell .avatar {
+    margin-left:3%;
+    margin-top:2%;
+    height: 4em;
+    width:auto;
+    vertical-align:top;
+}
+.postcell .wrapper {
+    margin-top:2%;
+    display: inline-block;
+}
+.postcell .wrapper .last-updated,
+.postcell .wrapper .post-count {
+    font-size: 1em;
+    color:grey;
+}
+.postcell .short-bio{
+    padding-left: 3%;
+    padding-right: 2%;
+    font-style: italic;
+    word-wrap: break-word;
+}
 /* Clear floats after the columns */
 .row:after {
     content: "";
```
							
								
								
									
**microblog.py**
```diff
@@ -1,6 +1,7 @@
 
 import sys, os, traceback
 import dateutil.parser
+from time import strftime, localtime
 
 # returns html-formatted string
 def make_buttons(btn_dict, msg_id):
```
```diff
@@ -52,6 +53,8 @@ def make_gallery(indices, w, conf=None):
     tag.append("</div>")
     return tag
 
+# apply basic HTML formatting - only div class here is gallery
+from html import escape
 def markup(message, config):
     def is_image(s, image_formats):
         l = s.rsplit('.', maxsplit=1)
@@ -116,8 +119,6 @@ def markup(message, config):
             gallery = []
     return sep.join(output), tags
 
-# apply basic HTML formatting - only div class here is gallery
-from html import escape
 class Post:
     def __init__(self, ts, msg):
         self.timestamp = ts.strip() # string
```
```diff
@@ -160,8 +161,7 @@ def parse_txt(filename):
                 state = 0
     return posts
 
-def get_posts(filename, config):
-    posts = parse_txt(filename)
+def get_posts(posts, config):
     taginfos = []
     tagcloud = dict() # (tag, count)
     tagged   = dict() # (tag, index of message)
```
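This refactor moves file reading out of `get_posts`: callers parse once with `parse_txt` and pass the list around, which lets `main` (later in this diff) reuse the same posts for the webring profile export. A sketch of the new call pattern, assuming `cfg` is the parsed settings.toml:

```python
# Sketch of the post-refactor call pattern, mirroring main():
posts = parse_txt(content)                   # one pass over the text file
tl, tc, tg = get_posts(posts, cfg["post"])   # timeline, tagcloud, tagged
# len(posts) and posts[0].get_epoch_time() now also feed export_profile()
```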
```diff
@@ -273,15 +273,19 @@ if __name__ == "__main__":
         p = argparse.ArgumentParser()
         p.add_argument("template", help="an html template file")
         p.add_argument("content", help="text file for microblog content")
-        p.add_argument("--sort",  \
+        p.add_argument("--sort", \
            help="sorts content from oldest to newest"
                 " (this is a separate operation from page generation)", \
             action="store_true")
+        p.add_argument("--skip-fetch", \
+            help="skips fetching profile data from remote sources;"
+                 " has no effect if webring is not enabled",\
+            action="store_true")
         args = p.parse_args()
         if args.sort:
             sort(args.content)
             exit()
-        return args.template, args.content
+        return args.template, args.content, args.skip_fetch
 
     # assume relative path
     def demote_css(template, css_list,  level=1):
```
```diff
@@ -296,14 +300,12 @@ if __name__ == "__main__":
             tpl = tpl.replace(css, ("%s%s" % (prepend, css) ))
         return tpl
 
-# needs review / clean-up
-# ideally relate 'lvl' with sub dir instead of hardcoding
     def writepage(template, timeline, tagcloud, config, subdir = None):
+        count  = len(timeline)
         html   = ""
         with open(template,'r') as f:
             html = f.read()
         try:
-            count  = len(timeline)
             p      = config["postsperpage"]
             pagectrl = Paginator(count, p, subdir)
         except ZeroDivisionError as e:
```
```diff
@@ -312,28 +314,33 @@ if __name__ == "__main__":
         except Exception as e:
             print("error: ",e, ("(number of posts = %i)" % count), file=sys.stderr)
             exit()
-        latest = timeline if count <= pagectrl.PPP else timeline[:pagectrl.PPP]
+        latest = timeline[:pagectrl.PPP]
+        link_from_top    = "./tags/%s/latest.html"
+        link_from_subdir = "../tags/%s/latest.html"
+        link_from_tagdir = "../%s/latest.html"
+        cloud = ""
+        level = 1
+        is_tagline = False
         if subdir == None: # if top level page
-            lvl    = 1
-            tcloud = make_tagcloud(tagcloud, "./tags/%s/latest.html")
-            print(pagectrl.singlepage(html, tcloud, latest))
-            tcloud = make_tagcloud(tagcloud, "../tags/%s/latest.html")
-            pagectrl.paginate(
-                demote_css(html, config["relative_css"], lvl),
-                tcloud, timeline
-            )
-        else: # if timelines per tag
-            is_tagline = True
-            lvl = 2
-            newhtml = demote_css(html, config["relative_css"], lvl)
-            tcloud = make_tagcloud(tagcloud, "../%s/latest.html")
-            fn = "%s/latest.html" % subdir
-            with open(fn, 'w') as f:
-                pagectrl.written.append(fn)
-                f.write(
-                    pagectrl.singlepage(newhtml, tcloud, latest, p=".")
-                )
-                pagectrl.paginate(newhtml, tcloud, timeline, is_tagline)
+            cloud = make_tagcloud(tagcloud, link_from_top)
+            print(pagectrl.singlepage(html, cloud, latest))
+            cloud = make_tagcloud(tagcloud, link_from_subdir)
+        else:
+            if subdir != "webring": # timelines per tag
+                is_tagline = True
+                level += 1
+                cloud    = make_tagcloud(tagcloud, link_from_tagdir)
+            else:
+                cloud    = make_tagcloud(tagcloud, link_from_subdir)
+            demoted  = demote_css(html, config["relative_css"], level)
+            filename = "%s/latest.html" % subdir
+            with open(filename, 'w') as f: # landing page for tag
+                pagectrl.written.append(filename)
+                page = pagectrl.singlepage(demoted, cloud, latest, p=".")
+                f.write(page)
+        pagectrl.paginate(
+            demote_css(html, config["relative_css"], level),
+            cloud, timeline, is_tagline)
         return pagectrl.written
 
     import toml
```
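The rework collapses the hardcoded `lvl` values into one `level` that is 1 for top-level and webring pages and 2 for `tags/<name>/` subdirectories. `demote_css` itself is unchanged here; from the context line above, it rewrites each stylesheet reference in the template, plausibly by prepending one `../` per level. A rough illustration under that assumption (the helper name is mine, not the repository's):

```python
# Rough sketch of what demote_css appears to do: prepend one "../"
# per directory level to each CSS reference in the template string.
def demote_css_sketch(tpl, css_list, level=1):
    prepend = "../" * level
    for css in css_list:
        tpl = tpl.replace(css, "%s%s" % (prepend, css))
    return tpl

html = '<link rel="stylesheet" href="timeline.css">'
print(demote_css_sketch(html, ["timeline.css"], level=2))
# -> <link rel="stylesheet" href="../../timeline.css">
```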
```diff
@@ -347,8 +354,133 @@ if __name__ == "__main__":
             s = None
         return s
 
+    import json
+    def export_profile(post_count, last_update, config):
+        if "profile" not in config:
+            return
+        p = config["profile"]
+        p["post-count"] = post_count
+        p["last-updated"] = last_update
+        if "username" not in p or "url" not in p:
+            print("Warning: no profile exported", file=sys.stderr)
+            return
+        with open(config["file_output"], 'w') as f:
+            print(json.dumps(p), file=f)
+
+    def get_webring(f_cfg):
+        import pycurl
+        from io import BytesIO
+        def get_proxy():
+            proxy = ""
+            if "http_proxy" in os.environ:
+                proxy = os.environ["http_proxy"]
+            elif "https_proxy" in os.environ:
+                proxy = os.environ["https_proxy"]
+            host = proxy[proxy.rfind('/') + 1: proxy.rfind(':')]
+            port = proxy[proxy.rfind(':') + 1:]
+            is_socks = proxy.find("socks://") >= 0 or proxy.find("socks5h://") >= 0
+            return host, int(port), is_socks
+
+        def fetch(url_list):
+            curl = pycurl.Curl()
+            if "http_proxy" in os.environ or "https_proxy" in os.environ:
+                hostname, port_no, is_socks = get_proxy()
+                curl.setopt(pycurl.PROXY, hostname)
+                curl.setopt(pycurl.PROXYPORT, port_no)
+                if is_socks:
+                    curl.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME)
+            datum = []
+            meta  = []
+            for url in url_list:
+                buf = BytesIO()
+                curl.setopt(curl.WRITEDATA, buf)
+                curl.setopt(pycurl.URL, url)
+                try:
+                    curl.perform()
+                    datum.append(buf)
+                    meta.append(curl.getinfo(curl.CONTENT_TYPE))
+                except pycurl.error as e:
+                    print(e, ": ", url, file=sys.stderr)
+            curl.close()
+            assert(len(datum) == len(meta))
+            return datum, meta
+
+        def to_json(curl_outs):
+            json_objs = []
+            for buf in curl_outs:
+                try:
+                    json_objs.append(json.loads(buf.getvalue()))
+                except Exception as e:
+                    print(e)
+            return json_objs
+
+        def render(profiles, template):
+            rendered = []
+            SHORT_BIO_LIMIT = 150
+            for profile in profiles:
+                try:
+                    epoch_timestamp = profile["last-updated"]
+                    if not isinstance(epoch_timestamp, int):
+                        epoch_timestamp = 0
+                    post_count = profile["post-count"]
+                    if not isinstance(post_count, int):
+                        post_count = 0
+                    self_desc = profile["short-bio"]
+                    if len(profile["short-bio"]) >= SHORT_BIO_LIMIT:
+                        self_desc = profile["short-bio"][:SHORT_BIO_LIMIT] + "..."
+                    card = template.format(
+                        __avatar__=escape(profile["avatar"]),
+                        __handle__=escape(profile["username"]),
+                        __url__=escape(profile["url"]),
+                        __post_count__ = post_count,
+                        __shortbio__= escape(self_desc),
+                        __lastupdated__= strftime(
+                            "%Y %b %d", localtime(epoch_timestamp)) )
+                    rendered.append(card)
+                except KeyError as e:
+                    print("remote profile is missing key: ", e, file=sys.stderr)
+                    print("\tsource: ", profile, file=sys.stderr)
+            return rendered
+
+        def get_avatars(profiles, save_path, img_src):
+            import hashlib
+            imgs, info = fetch([p["avatar"] for p in profiles])
+            length = len(imgs)
+            if length != len(profiles) or length == 0:
+                print("error in retrieving images", file=sys.stderr)
+                return
+            for i in range(0, length):
+                content_type  = info[i].split('/')
+                ext           = content_type.pop()
+                if content_type.pop() != "image":
+                    print("\tskip: not an image", file=sys.stderr)
+                    continue
+                data  = imgs[i].getvalue()
+                h = hashlib.sha1(data).hexdigest()
+                filename = "%s.%s" % (h, ext)
+                path     = "%s/%s" % (save_path, filename)
+                profiles[i]["avatar"] = "%s/%s" % (img_src, filename)
+                if not os.path.isfile(path):
+                    with open(path, "wb") as f:
+                        f.write(data)
+
+        j, m = fetch(f_cfg["list"])
+        list_of_json_objs = to_json(j)
+        if list_of_json_objs == []:
+            print("no remote profiles loaded", file=sys.stderr)
+            return []
+        if f_cfg["internal-avatars"]["enabled"]:
+            a = f_cfg["internal-avatars"]["local_path_to_avatars"]
+            b = f_cfg["internal-avatars"]["path_to_avatars"]
+            get_avatars(list_of_json_objs, a, b)
+        try:
+            list_of_json_objs.sort(key=lambda e: e["last-updated"], reverse=True)
+        except KeyError: pass
+        return render(list_of_json_objs, f_cfg["format"])
+
     def main():
-        tpl, content = get_args()
+        tpl, content, skip_fetch = get_args()
         cfg = load_settings()
         if cfg == None:
             print("exit: no settings.toml found.", file=sys.stderr)
```
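`get_proxy` does string surgery on the proxy URL rather than using `urllib.parse`, so it is worth seeing what it yields. A worked example, assuming a Tor-style SOCKS proxy in the environment (the address is hypothetical):

```python
# What get_proxy() extracts from a typical proxy URL:
proxy = "socks5h://localhost:9050"

host = proxy[proxy.rfind('/') + 1: proxy.rfind(':')]   # "localhost"
port = proxy[proxy.rfind(':') + 1:]                    # "9050"
is_socks = proxy.find("socks://") >= 0 or proxy.find("socks5h://") >= 0

print(host, int(port), is_socks)  # localhost 9050 True
# PROXYTYPE_SOCKS5_HOSTNAME then makes curl resolve DNS through the
# proxy as well, which matters for .onion-style hostnames.
```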
```diff
@@ -359,7 +491,8 @@ if __name__ == "__main__":
         if "page" not in cfg:
             print("exit: table 'page' absent in settings.toml", file=sys.stderr)
             return
-        tl, tc, tg = get_posts(content, cfg["post"])
+        p = parse_txt(content)
+        tl, tc, tg = get_posts(p, cfg["post"])
         if tl == []:
             return
         # main timeline
```
```diff
@@ -378,18 +511,36 @@ if __name__ == "__main__":
                 tpl, tagline, tc, cfg["page"], \
                 subdir="tags/%s" % key[1:]     \
             )
+        if "webring" in cfg:
+            if cfg["webring"]["enabled"] == True:
+                export_profile(
+                    len(p), p[0].get_epoch_time(), cfg["webring"] )
+            if not skip_fetch:
+                fellows = get_webring(cfg["webring"]["following"] )
+                if fellows != []:
+                    updated += writepage(
+                        tpl, fellows, tc, cfg["page"], subdir="webring")
         with open("updatedfiles.txt", 'w') as f:
             for filename in updated:
                 print(filename, file=f) # sys.stderr)
             if "latestpage" in cfg:
                 print(cfg["latestpage"], file=f)
+            if "latestpages" in cfg:
+                for page in cfg["latestpages"]:
+                    print(page, file=f)
     try:
         main()
     except KeyError as e:
         traceback.print_exc()
         print("\n\tA key may be missing from your settings file.", file=sys.stderr)
-    except dateutil.parser._parser.ParserError as e:
+    except dateutil.parser._parser.ParserError:
         traceback.print_exc()
         print("\n\tFailed to interpret a date from string..",
               "\n\tYour file of posts may be malformed.",
               "\n\tCheck if your file starts with a line break.", file=sys.stderr)
+    except toml.decoder.TomlDecodeError:
+        traceback.print_exc()
+        print("\n\tYour configuration file is malformed.")
+    except FileNotFoundError as e:
+        traceback.print_exc()
+        print("\n\tA potential cause is attempting to save a file to a folder that does not exist.")
```
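`updatedfiles.txt` remains one path per line; the change appends every entry of `latestpages`, so `meta.json` gets uploaded alongside `result.html`. An illustrative manifest after a run (all paths invented):

```python
# Illustrative updatedfiles.txt contents; neouploader.py reads this
# file verbatim, one path per line, and uploads each entry.
manifest = [
    "pages/1.html",
    "tags/art/latest.html",   # invented tag name
    "webring/latest.html",
    "meta.json",              # from latestpages
    "result.html",            # from latestpages
]
print("\n".join(manifest))
```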
**neouploader.py**

```diff
@@ -1,11 +1,30 @@
 
-import sys, subprocess, getpass, pycurl, urllib.parse
+import sys, os, subprocess, getpass, pycurl, urllib.parse
 if __name__ == "__main__":
+    def get_proxy():
+        proxy = ""
+        if "http_proxy" in os.environ:
+            proxy = os.environ["http_proxy"]
+        elif "https_proxy" in os.environ:
+            proxy = os.environ["https_proxy"]
+        host = proxy[proxy.rfind('/') + 1: proxy.rfind(':')]
+        port = proxy[proxy.rfind(':') + 1:]
+        is_socks = proxy.find("socks://") >= 0 or proxy.find("socks5h://") >= 0
+        return host, int(port), is_socks
+
     def api_upload(endpoint, dest_fmt = "/microblog/%s"):
         pages = []
         with open("updatedfiles.txt") as f:
             pages = f.readlines()
         c = pycurl.Curl()
+
+        if "http_proxy" in os.environ or "https_proxy" in os.environ:
+            hostname, port_no, is_socks = get_proxy()
+            c.setopt(pycurl.PROXY, hostname)
+            c.setopt(pycurl.PROXYPORT, port_no)
+            if is_socks:
+                c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME)
+
         c.setopt(c.URL, endpoint)
         c.setopt(c.POST, 1)
         for page in pages:
```
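Both scripts now carry an identical `get_proxy` helper and consult `http_proxy`/`https_proxy` before creating their curl handles, so routing the uploader through a local SOCKS proxy is purely a matter of environment. An illustrative invocation (the proxy address is hypothetical):

```python
# Illustrative only: the scripts read http_proxy/https_proxy from the
# environment, so set the variable before they build pycurl handles.
import os, subprocess

env = dict(os.environ, http_proxy="socks5h://127.0.0.1:9050")
subprocess.run(["python3", "neouploader.py"], env=env)
```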