Created
February 14, 2020 01:32
-
-
Save binarybrat/9a82121e8728ea44eadbe9d2b93b7948 to your computer and use it in GitHub Desktop.
current main script
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import os, sys, traceback | |
| import ri0t | |
| import dbthelper | |
| from reddit.utils.counters import * | |
| from reddit.utils._company_rgx import * | |
| from utils.log import init_logger | |
| from models import BaseModel, init_db | |
| from models.blah import TaggingPatterns, WayBackTutorials, SpookyLinks, SewUser | |
| from models.messagemodel import * | |
| from models.companymodel import * | |
| from models.commentmodel import * | |
| from models.postmodel import * | |
| from models.detailsmodel import * | |
| from models.modlogmodel import * | |
| from peewee import OperationalError | |
| from models.tagmodel import tag_counts | |
| from models.risingmodel import * | |
| from reddit._pattern_output import * | |
| from reddit._pattern_outputONLAPTOP import * | |
| from reddit.tubeChecking import * | |
| from reddit.utils.randomhelpershit import * | |
| from reddit.utils.globalvars import * | |
| import praw.models | |
# Module-level setup: logger, timing baseline, and database bootstrap.
# NOTE(review): `time` is not imported by name above — presumably provided by
# one of the star imports (e.g. reddit.utils.globalvars); confirm.
logger = init_logger(__name__, testing_mode=False)
START_TIME = time.time()  # epoch seconds at script start
HOWAGO = START_TIME - (3600 * 48)  # cutoff: 48 hours before start (used by needs_reminder)
sqldb = init_db()
sqldb.connect()
# Create all tables the bot uses; peewee's create_tables skips existing tables
# when safe=True (the default) — presumably relied on here, confirm.
sqldb.create_tables([SewUser, Posts, Comment, Pattern, PatternPosts, PendingPatternPosts, PatternTags, TagCategory, Tag, Rising])
sqldb.create_tables([Modlog, Details, DetailsPending, YoutubeLogging, TubeStatsLogging, Tutorials, SewingMessages, WikiPatterns, OutputInfo, TaggingPatterns, WayBackTutorials, SpookyLinks, DataPatternsPending])
def grabPatternTagsTable(verbose=False):
    """Return every Pattern row with a comma-joined "taglist" of its tag names.

    Two LEFT OUTER joins (Pattern -> PatternTags -> Tag) keep patterns that
    have no tags (their taglist is NULL). Returns the peewee query object, or
    None when the query matches no rows.
    """
    patterns = Pattern.select(Pattern, fn.group_concat(Tag.tagname).alias("taglist")
                              ).join(PatternTags, JOIN.LEFT_OUTER, on=(PatternTags.patternID == Pattern.id)
                              ).join(Tag, JOIN.LEFT_OUTER, on=(Tag.id == PatternTags.tagID)
                              ).group_by(Pattern.id)
    if verbose:
        print("Returning {} patterns...".format(patterns.count()))
    # Truthiness of a peewee query executes it; empty result -> None.
    return patterns if patterns else None
# Reddit API sessions: the main mod account plus two helper logins, and
# handles to the subreddits this bot manages.
reddit = initR('sewingmodthings')
r = ri0t.login()
dbtr = dbthelper.login()
sewsub = reddit.subreddit('sewing')
dbtsub = dbtr.subreddit('dbtselfhelp')
def processDetailsPending(post):
    """Send the "please add details" reminder for *post* (at most once).

    No-op when a reminder message already exists for this post; otherwise
    sends the reminder, looks up its message id, and records the post as
    warned in the DetailsPending DB.
    """
    if reminder_msgDone(reddit, post):
        return
    send_nodetails_reminder(post)
    reminder_id = get_nodetails_msgID(reddit, post)
    save_pendingDetails(post, warning=True, warningmsg=reminder_id)
# Shared work queues: populated by check_pending(), consumed and cleared by
# ApprovePending() / RemovePending().
tocheckApprove = []
tocheckRemove = []
tocheckCrap = []  # NOTE(review): never written in this file — possibly vestigial
tocheckAlreadyApproved = []  # NOTE(review): never written in this file — possibly vestigial
def check_pending(timeback=24, wordmin=40, scoremin=50):
    """Triage DetailsPending posts into the approve/remove queues.

    Re-fetches each pending post from reddit and either: marks deleted posts
    ignored, skips spam/hard-removed posts, force-marks already-approved
    posts, queues posts that now have details into tocheckApprove, or queues
    warned-but-still-detail-less posts (old and popular enough) into
    tocheckRemove. Prints a summary either way.
    """
    forceapprove = 0
    tocheckDB = get_tocheckDB(timeback=timeback)
    tocheckIDS = [pos.idstr for pos in tocheckDB]
    tocheckWarnedIDS = [pos.idstr for pos in tocheckDB if not pos.filtered]
    tocheckFilteredIDS = [pos.idstr for pos in tocheckDB if pos.filtered]
    if not tocheckIDS:
        return print("Nothing to check for Pending Details...")
    else:
        logger.info("Scanning pending posts: {} - Warned: {} || Filtered: {}".format(len(tocheckIDS), len(tocheckWarnedIDS), len(tocheckFilteredIDS)))
        for post in reddit.info([x for x in tocheckIDS]):
            if isdeleted(post):
                # Author-deleted: never warn/remove this one again.
                toupdate = DetailsPending.update(ignore=True).where(DetailsPending.idstr == post.fullname)
                toupdate.execute()
                print("Deleted post set to ignore: {}\n{}\n------------------------------------\n".format(post.shortlink, post.title))
            elif post.spam or hardcore_removed(post):
                print("Continuing on {} - {}\nPost marked as spam or hardcore removed...".format(post.shortlink, post.title))
                continue
            elif already_approved(post, justme=True):
                update_pendingApproved(post)
                forceapprove += 1
                logger.info("Force update post to approved: {}\n{}\n------------------------------------\n".format(post.shortlink, post.title))
            else:
                post._fetch()  # refresh praw's cached attributes before re-checking
                comitem = post_has_details(post, wordmin=wordmin)
                if comitem and not hardcore_removed(post) and not already_approved(post, justme=True) and post not in tocheckApprove:
                    tocheckApprove.append(post)
                else:
                    if not comitem and not hardcore_removed(post) and not already_approved(post, justme=True):
                        pending = get_pending_post_details(post)
                        # Only queue for removal: still unfiltered, live, popular
                        # (score >= scoremin), and at least ~33 minutes old.
                        if pending.filtered == False and not post.removed and post.score >= scoremin and elapsed(post) >= 2000:
                            dtime = time.mktime(pending.fetched.timetuple())
                            howago = time.time() - dtime
                            # ...and at least an hour since the warning was recorded.
                            if howago >= 3600 and post not in tocheckRemove:
                                tocheckRemove.append(post)
    return print(
        "To Approve: {} | To Remove: {} || Force Approved: {}".format(len(tocheckApprove), len(tocheckRemove),
                                                                      forceapprove))
def RemovePending():
    """Remove every post queued in tocheckRemove and record the removal.

    For each (de-duplicated) post: send the "removed for no details" message
    if not already sent, look up the removal-message id for *this* post,
    remove the post, and mark it filtered in the DetailsPending DB. Clears
    the tocheckRemove queue when done.
    """
    print("Removing {} warned posts for not giving details...".format(len(tocheckRemove)))
    doublecheck = uniq(tocheckRemove)  # guard against a post queued twice
    for post in doublecheck:
        if not removal_msgDone(reddit, post):
            send_nodetails_removal(post)
        # BUGFIX: always resolve the removal-message id for the *current* post.
        # Previously remsgid was a module global assigned only when the message
        # had not been sent yet, so already-messaged posts were recorded with a
        # stale id from an earlier post (or hit a NameError on the first one).
        remsgid = get_nodetails_removalID(reddit, post)
        logger.info("Post: {} Author: {} Removal Msg: {}".format(post.fullname, post.author.name, remsgid))
        post.mod.remove(spam=False)
        logger.info("Updating Details Pending DB...\n--------------------\n")
        update_pendingFiltered(post, remsgid)
    tocheckRemove.clear()
def ApprovePending():
    """Approve queued posts whose authors added construction details.

    Re-fetches each post in tocheckApprove; re-approves posts we removed
    ourselves, otherwise sends an approval message (once), then marks the
    post approved in the DetailsPending DB. Clears the queue when done.
    """
    logger.info("Approving {} posts that added details…".format(len(tocheckApprove)))
    for post in tocheckApprove:
        post._fetch()  # refresh praw's cached attributes (removed/score/etc.)
        if post.removed and not hardcore_removed(post):
            modpost(r, post.id, 'approve')
            logger.info("Approved a removed pending details post by u/{} - {}".format(post.author, post.shortlink))
        elif time.time() >= 1565783643.0 and not approval_msgDone(reddit, post):
            # 1565783643 ~ 2019-08-14; presumably a feature-activation cutoff
            # so old posts were not messaged retroactively — TODO confirm.
            send_details_approval(post)
            apmsgid = get_details_approvalID(reddit, post)
            logger.info("Post: {} Author: {} Approval Msg: {}".format(post.fullname, post.author, apmsgid))
        update_pendingApproved(post)
    tocheckApprove.clear()
def needs_reminder(post, wordmin=50):
    """Return True when *post* should receive a "please add details" reminder.

    A post qualifies only when ALL hold: it is live (not deleted/spam/removed),
    newer than the 48h HOWAGO cutoff, at least 30 minutes old, flaired with one
    of FLAIRTOCHECK, contains no pattern info and no selftext details of at
    least *wordmin* words, has not already been warned (dpDone) or approved,
    and no comment supplies details (post_has_details returns None).
    """
    if not isdeleted(post) and not post.spam and not post.removed and post.created_utc >= HOWAGO and elapsed(post) >= 1800 and post.link_flair_text is not None and post.link_flair_text.lower() in [x.lower() for x in FLAIRTOCHECK] and not contains_pattern(post) and not details_in_selftext(post, wordmin) and not dpDone(post) and not already_approved(post, justme=True):
        comitem = post_has_details(post, wordmin=wordmin)
        if comitem is None:
            return True
    return False
def getnewlink(link):
    """Strip a trailing HTML anchor remnant from *link*.

    Returns everything before the first "</a>"; when no "</a>" is present the
    link is returned unchanged.
    """
    # str.split returns [link] when the separator is absent, so the explicit
    # membership test in the original was redundant.
    return link.split("</a>")[0]
# Posts seen by grabsubstuff() that are not yet approved/recorded in the DB.
notindatabaseyet = []
def grabsubstuff(amntofpost):
    """Scan the newest *amntofpost* subreddit posts into the module-level lists.

    Resets then fills newpostsList, newcommentsList, unflairedList and
    notindatabaseyet; auto-flairs evident finished-object posts as 'FO'; and
    collects every comment of every scanned post. Prints a timing summary.
    """
    newpostsList.clear()
    newcommentsList.clear()
    unflairedList.clear()
    notindatabaseyet.clear()
    count = 0
    started = datetime.now()
    for post in sewsub.new(limit=amntofpost):
        count += 1
        if count % 350 == 0:
            print(count, 'done')  # progress heartbeat
        newpostsList.append(post)
        if not already_approved(post):
            notindatabaseyet.append(post)
        if post.link_flair_text is None and not isdeleted(post):
            # NOTE(review): `and` binds tighter than `or`, so this evaluates as
            # A or (B and C) or D; if (A or B) and C was intended, parentheses
            # are needed — confirm the intended precedence.
            if (contains_pattern(post) or contains_project_phrases(post) and 'help' not in post.title.lower()) or hasreports(post):
                flair_post(post, 'FO', verbose=True)
            # NOTE(review): praw does not refresh link_flair_text after
            # flairing, so this likely still sees None for just-flaired posts —
            # confirm whether flair_post refreshes the object.
            if post.link_flair_text is None:
                unflairedList.append(post)
        post.comments.replace_more(limit=None)  # expand all "load more comments"
        for comment in post.comments.list():
            newcommentsList.append(comment)
    duration = (datetime.now() - started).seconds
    print("\n\n---------------------------------------------\n")
    print("End of adding posts - took: {} to run.".format(prettytime(duration)))
    print("Posts: {} | Not In DB: {} | Unflaired: {} | Comments: {}".format(len(newpostsList), len(notindatabaseyet),
                                                                            len(unflairedList), len(newcommentsList)))
    print("-----------------------------------------------------------------------------------")
    print("-----------------------------------------------------------------------------------")
def quickPostProcess(listofposts, verbose=False):
    """Add-or-update each post in *listofposts* in the Posts DB.

    Accepts a single post or a list. New live posts are added; deleted posts
    get a deletion update; everything else gets a normal update. Also expands
    and collects every comment of every post into newcommentsList. Prints a
    timing/count summary.
    """
    count = 0
    processed = 0
    updated = 0
    delup = 0
    if not isinstance(listofposts, list):
        listofposts = [listofposts]  # tolerate a single post argument
    numofposts = len(listofposts)
    startTime = datetime.now()
    if verbose:
        print("Starting: {} - {} posts to process...\n\n".format(startTime.strftime(formatdt), numofposts))
    for post in reddit.info([pos.fullname for pos in listofposts]):
        count += 1
        if count % 50 == 0:
            print(count, 'done')  # progress heartbeat
        # Consistency fix: use get_or_none (as scan_posts does) instead of
        # evaluating a whole SELECT's truthiness — one indexed row lookup.
        posdone = Posts.get_or_none(Posts.fullname == post.fullname)
        if not posdone and not isdeleted(post):
            add_post(post, verbose=verbose)
            processed += 1
        elif isdeleted(post):
            delUpdate_post(post, verbose=verbose)
            delup += 1
        else:
            update_post(post, verbose=verbose)
            updated += 1
        post.comments.replace_more(limit=None)  # expand all "load more comments"
        for comment in post.comments.list():
            newcommentsList.append(comment)
    duration = (datetime.now() - startTime).seconds
    print("\n\n")
    print("End of adding posts - took: {} to run.".format(prettytime(duration)))
    print("Processed: {} | Updated: {} | DelUpdated: {}".format(processed, updated, delup))
    print("Comments Grabbed: {}".format(len(newcommentsList)))
    print("-----------------------------------------------------------------------------------")
    print("-----------------------------------------------------------------------------------")
def gettutshit(item):
    """Split a stored "comment_id | url" record into (comment_id, cleaned_url).

    Strips stray single-quote and close-bracket characters (left over from
    list-style serialization) out of the URL half.
    """
    parts = item.split(' | ')
    comid = parts[0]
    linkurl = parts[1].translate(str.maketrans('', '', "']"))
    return comid, linkurl
# Inspection lists filled by scan_unmodded(justTesting=True) for manual review.
unmodded_posts = []
unmodded_needsreminder = []
def scan_unmodded(cnt=30, wordmin=50, justTesting=False):
    """Walk the unmoderated queue: auto-flair and send details reminders.

    With justTesting=True, posts are only collected into the module-level
    inspection lists (and a summary is printed) instead of being messaged.
    """
    if len(unmodded_posts) >= 1:
        unmodded_posts.clear()
    if len(unmodded_needsreminder) >= 1:
        unmodded_needsreminder.clear()
    for post in sewsub.mod.unmoderated(limit=cnt):
        if justTesting:
            if post not in unmodded_posts:
                unmodded_posts.append(post)
        if unflaired_canFlair(post):
            flair_post(post, 'FO', verbose=True)
        if needs_reminder(post, wordmin=wordmin) and not dpDone(post):
            if justTesting:
                if post not in unmodded_needsreminder:
                    unmodded_needsreminder.append(post)
            else:
                processDetailsPending(post)
    if justTesting:
        return print("Unmodded Posts: {} | Needs Reminder: {}".format(len(unmodded_posts), len(unmodded_needsreminder)))
def scan_posts(cnt=35, poscnt=350, wordmin=40, riscore=850, v=True):
    """Main post scan over the newest *poscnt* posts.

    Per post: auto-flair unflaired posts, send reflair/details reminders,
    queue pattern posts, add/update the Posts DB row, and run the Rising
    workflow for recent high-score posts. Prints a summary at the end.
    """
    from reddit.utils.counters import count, processed, updated
    ppp = 0
    pflair = 0
    rflair = 0
    # NOTE(review): repflair is printed in the summary but never incremented,
    # while rflair is incremented but never printed — likely swapped or
    # vestigial; confirm before changing the summary line.
    repflair = 0
    riscnt = 0
    pendetails = 0
    delup = 0
    startTime = datetime.now()
    print("Starting Post Scanner: {}".format(startTime.strftime(dtformat)))
    for post in sewsub.new(limit=poscnt):
        count += 1
        if count % cnt == 0:
            print(count, 'done')  # progress heartbeat
        if post.link_flair_text is None and unflaired_canFlair(post):
            flair_post(post, 'FO', verbose=True)
            pflair += 1
            if needs_reminder(post, wordmin=wordmin) and not dpDone(post):
                processDetailsPending(post)
        elif post.link_flair_text is not None:
            if nonproject_needs_reflair(post) and not reflairDone(reddit, post):
                process_non_project_post(post)
                rflair += 1
                logger.info(
                    "Sent General/Other please reflair reminder to u/{}\n{} {}\n\n".format(post.author.name,
                                                                                           post.shortlink,
                                                                                           post.title))
            if needs_reminder(post, wordmin=wordmin) and not dpDone(post):
                processDetailsPending(post)
                pendetails += 1
            if post.link_flair_text.lower() in [x.lower() for x in FLAIRTOCHECK] and contains_pattern(post) and not isdeleted(
                    post) and not post.spam and not post.removed and not ppp_done(post):
                add_ppp(post)
                ppp += 1
                logger.info("Added post to Pending Patterns DB: {}\n{}\n\n".format(post.fullname, post.title))
        posdone = Posts.get_or_none(Posts.fullname == post.fullname)
        if not posdone and not isdeleted(post):
            add_post(post, verbose=v)
            processed += 1
        else:
            if isdeleted(post):
                delUpdate_post(post, verbose=v)
                delup += 1
            else:
                update_post(post, verbose=False)
                updated += 1
        currentitis = datetime.now()
        # NOTE(review): named "a week back" but actually a 4-day window.
        aweekback = currentitis - timedelta(days=4)
        if datetime.utcfromtimestamp(post.created_utc) >= aweekback and not isdeleted(post) and not post.spam and not post.removed and post.score >= riscore and not doneRising(post):
            # RisingHelper's side effects (sticky/discord) run in its constructor.
            rhelper = RisingHelper(reddit, post, doSticky=True, doOpMsg=False, doDiscord=True)
            logger.info("Processed Rising Post: {}\n{}\n\n".format(post.shortlink, post.title))
            riscnt += 1
    duration = (datetime.now() - startTime).seconds
    cpurple("Post Scanning Processes Done. || {}".format(prettytime(duration)))
    cpurple("Processed: {} || Updated: {} || || Details Remind: {} || Rising: {} || Flaired: {} || Flair Reported: {} || Patterns: {}".format(processed, updated, pendetails, riscnt, pflair, repflair, ppp))
comsDONE = []  # NOTE(review): never referenced elsewhere in this file — possibly vestigial
def scan_comments(reddit, comlmt=700, grablinks=True, verbose=True, overkill=False, wordmin=40):
    """Scan the newest *comlmt* subreddit comments.

    Per comment: add it to (or update it in) the Comment DB, harvest
    YouTube/tutorial links from non-bot comments, process details comments
    (including approving filtered pending posts), and queue pattern comments.
    Prints a timing/count summary.
    """
    from reddit.utils.counters import ccount, cprocessed, cupdated, cdelup, cppp, ctut, detdone, pendone
    startTime = datetime.now()
    print("Starting Comment Scanner: {}".format(startTime.strftime(dtformat)))
    for comment in sewsub.comments(limit=comlmt):
        chelper = CommentHelper(reddit, comment, grablinks=grablinks, verbose=verbose, overkill=overkill, wordmin=wordmin)
        if not chelper.already_in_db() and not chelper.is_deleted:
            chelper.add_comment()
            cprocessed += 1
            # Harvest links only from non-bot comments on posts whose flair is
            # not a pure question flair.
            if chelper.has_comlinks and chelper.comment.author != 'sewingmodthings' and (
                    chelper.link_flair is None or chelper.link_flair is not None and chelper.link_flair.lower() not in
                    ['machine questions', 'suggest machine', 'fabric question']):
                commentlinks = cleancomlinks(chelper.commentlinks)
                for linkurl in commentlinks:
                    # BUGFIX: the original wrote ("/channel/" or "/user/") not in
                    # linkurl, which evaluates to "/channel/" not in linkurl — so
                    # /user/ URLs were never excluded. Exclude both as intended.
                    if re.search(tubergx, linkurl) and "/channel/" not in linkurl and "/user/" not in linkurl:
                        if comment.author == 'taichichuan123' and linkurl == 'https://www.youtube.com/watch?v=SgHxs6ukadM':
                            continue  # hard-coded skip; presumably a known-noisy link — TODO confirm
                        process_tube_data(chelper.comment, linkurl)
                        time.sleep(5)  # throttle external lookups
                    else:
                        ext = tldextract.extract(linkurl)
                        if not tutDone(chelper.comment.fullname, linkurl) and not (
                                ext.domain == 'spoonflower' and ext.subdomain == 'www') and not re.search(
                                parserdomainsrgx,
                                ext.domain) and not re.search(
                                domainstoignore, ext.domain) and not re.search(isfilergx, linkurl):
                            try:
                                linktitle = grab_tutorial_info(chelper.comment.fullname, linkurl, verbose=True)
                                add_tutorial(chelper.comment, tags=None, linktitle=linktitle, linkurl=linkurl,
                                             verbose=True)
                                ctut += 1
                                time.sleep(5)  # throttle external lookups
                            except IntegrityError:
                                pass  # tutorial already stored
                            except Exception:
                                pass  # best-effort: one bad link must not stop the scan
        else:
            if chelper.is_deleted:
                chelper.updateDeleted_comment()
                cdelup += 1
            else:
                chelper.update_comment()
                cupdated += 1
        if chelper.details_comment:
            dhelper = DetailsHelper(reddit, comment, verbose=True, overkill=True)
            if not dhelper.already_in_db():
                dhelper.add_details()
                detdone += 1
            else:
                if dhelper.oktoupdate:
                    dhelper.update_details()
            if dhelper.pending:
                if dhelper.pending.filtered and dhelper.comment.removed:
                    print("Approving a filtered post for having details!")
                    modpost(r, dhelper.submission.id, 'approve')
                if not dhelper.approval_msg_sent():
                    dhelper.send_details_approval()
                dhelper.update_pending_approved()
                pendone += 1
                print("Updated post in pending details to being approved!")
        if chelper.pattern_comment and not ppp_done(chelper.submission):
            if contains_pattern(chelper.submission):
                add_ppp(chelper.submission)
            else:
                add_ppp(chelper.submission, chelper.comment.fullname, chelper.comment.body)
            print("Adding COMMENT pending pattern post: {} - {}".format(chelper.comment.fullname,
                                                                        chelper.submission.fullname))
            cppp += 1
    duration = (datetime.now() - startTime).seconds
    print("Comment Scanning Processes Done. || {}".format(prettytime(duration)))
    print(
        ">> Added: {} || DelUpdated {} || Updated: {} || Details Done: {} || Details Pending: {} || "
        "Tutorial Added: {} || Patterns: {}".format(cprocessed, cdelup, cupdated, detdone, pendone, ctut, cppp))
def comments_from_list(reddit, listofcomments, grablinks=False, verbose=False, overkill=False, wordmin=50):
    """Process an explicit list of comments (oldest first after reversal).

    Per comment: add/update the Comment DB row, harvest YouTube/tutorial
    links, run the pattern/tutorial/details checks, and handle pending-details
    approval. Prints a timing/count summary.
    """
    from reddit.utils.counters import ccount, cprocessed, cupdated, cdelup, cppp, ctut, detdone, pendone
    tubecnt = 0
    startTime = datetime.now()
    print("Starting Comment Scanner From List: {} || {}".format(startTime.strftime(dtformat), len(listofcomments)))
    listofcomments.reverse()  # oldest-first so DB rows appear in posting order
    for comment in listofcomments:
        ccount += 1
        if ccount % 150 == 0:
            print(ccount, 'DONNEEEE')  # progress heartbeat
        chelper = CommentHelper(reddit, comment, grablinks=grablinks, verbose=verbose, overkill=overkill,
                                wordmin=wordmin)
        if not chelper.already_in_db() and not chelper.is_deleted:
            chelper.add_comment()
            cprocessed += 1
            if chelper.has_comlinks and chelper.comment.author != 'sewingmodthings' and (
                    chelper.link_flair is None or chelper.link_flair is not None and chelper.link_flair.lower() not in
                    ['suggest machine', 'fabric question']):
                commentlinks = cleancomlinks(chelper.commentlinks)
                for linkurl in commentlinks:
                    # BUGFIX: the original wrote ("/channel/" or "/user/") not in
                    # linkurl, which evaluates to "/channel/" not in linkurl — so
                    # /user/ URLs were never excluded. Exclude both as intended.
                    if re.search(tubergx, linkurl) and "/channel/" not in linkurl and "/user/" not in linkurl:
                        process_tube_data(chelper.comment, linkurl)
                        tubecnt += 1
                        time.sleep(5)  # throttle external lookups
                    else:
                        ext = tldextract.extract(linkurl)
                        if not tutDone(chelper.comment.fullname, linkurl) and not (
                                ext.domain == 'spoonflower' and ext.subdomain == 'www') and not re.search(
                                parserdomainsrgx, ext.domain) and not re.search(domainstoignore,
                                                                                ext.domain) and not re.search(isfilergx,
                                                                                                              linkurl):
                            try:
                                linktitle = grab_tutorial_info(chelper.comment.fullname, linkurl, verbose=True)
                                add_tutorial(chelper.comment, tags=None, linktitle=linktitle, linkurl=linkurl,
                                             verbose=True)
                                ctut += 1
                                time.sleep(5)  # throttle external lookups
                            except IntegrityError:
                                pass  # tutorial already stored
                            except Exception:
                                pass  # best-effort: one bad link must not stop the scan
        else:
            if chelper.is_deleted:
                chelper.updateDeleted_comment()
                cdelup += 1
            else:
                chelper.update_comment()
                cupdated += 1
        chelper.check_for_pattern()
        chelper.check_for_tutorial()
        chelper.check_for_details()
        if chelper.details_comment:
            dhelper = DetailsHelper(reddit, comment, verbose=True, overkill=True)
            # Skip deleted/spam comments and posts removed by another mod.
            if not dhelper.is_deleted and not dhelper.comment.spam and not dhelper.submission.spam and not (
                    dhelper.submission.removed and dhelper.submission.banned_by is not None and dhelper.submission.banned_by != 'sewingmodthings'):
                if not dhelper.already_in_db():
                    dhelper.add_details()
                    detdone += 1
                else:
                    if dhelper.oktoupdate:
                        dhelper.update_details()
                if dhelper.has_pattern and not dhelper.pattern_in_db():
                    try:
                        dhelper.add_pattern()
                        cppp += 1
                    except IntegrityError:
                        pass  # pattern already stored
                if dhelper.pending:
                    if dhelper.pending.filtered and dhelper.comment.removed:
                        print("Approving a filtered post for having details!")
                        modpost(r, dhelper.submission.id, 'approve')
                    if not dhelper.approval_msg_sent():
                        dhelper.send_details_approval()
                    dhelper.update_pending_approved()
                    pendone += 1
                    print("Updated post in pending details to being approved!")
    duration = (datetime.now() - startTime).seconds
    print("Comment Scanning Processes Done. || {}".format(prettytime(duration)))
    print(
        ">> Added: {} || DelUpdated {} || Updated: {} || Details Done: {} || Details Pending: {} || "
        "Tutorial Added: {} || Youtube Added: {}".format(cprocessed, cdelup, cupdated, detdone, pendone, ctut, tubecnt))
def get_author_breakdown(reddit, authorname, grab_backshit=True):
    """Print a posting/commenting profile for *authorname* from the DB.

    Optionally backfills the user's history first, then prints join date,
    post/comment totals, and the posts-to-helpful-comments ratio ("helpful"
    meaning comments the user left on other people's posts).
    """
    if grab_backshit:
        # Backfill this author's posts and comments into the DB first.
        grab_author_backposts(reddit, authorname)
        grab_author_backcoms(reddit, authorname)
    authorposts = Posts.select().where(Posts.author == authorname)
    authorcomments = Comment.select().where(Comment.author == authorname)
    submittercoms = Comment.select().where((Comment.author == authorname) & (Comment.is_submitter == True))
    notsubmittercoms = Comment.select().where((Comment.author == authorname) & (Comment.is_submitter == False))
    postcount = authorposts.count()
    comcount = authorcomments.count()
    submittercount = submittercoms.count()
    helpfulcoms = notsubmittercoms.count()
    try:
        com2pos = helpfulcoms / postcount
    except ZeroDivisionError:
        com2pos = 'Nothing'  # author has no posts in the DB
    datejoined = joinDate(authorname, verbose=False)
    joineddate = datetime.fromtimestamp(datejoined).strftime(dtformat)
    ago = human(datetime.fromtimestamp(datejoined))
    template = """>> Breakdown for u/{}
User Joined: {} - {}
Total Posts: {}
Total Comments: {}
Helpful Comments: {} Submitter Comments: {}
Posts to Helpful Comment Ratio: {}
"""
    return print(template.format(authorname, joineddate, ago, postcount, comcount, helpfulcoms, submittercount, com2pos))
# NOTE(review): appears unused in this file; get_subscribers() is used instead.
subScribers = 0
def get_subscribers():
    """Return the subreddit's subscriber count from its id-card widget."""
    id_card = sewsub.widgets.id_card
    return id_card.subscribersCount
def get_currentlyViewing():
    """Return the number of users currently viewing the subreddit."""
    id_card = sewsub.widgets.id_card
    return id_card.currentlyViewingCount
def run(doPending=True, doMessages=True, forever=True, timeback=24, wordmin=40, scoremin=50, poscnt=350, comlmt=750, riscore=850, v=True):
    """Main bot loop: pending-details, messages, post/comment/modlog scans.

    Loops until KeyboardInterrupt when *forever* is True, otherwise runs a
    single pass. DB and transient errors are logged and the loop continues;
    sleep length between passes scales with the subscriber count.
    """
    loop = 0
    count = 0
    b00p_start = time.time()
    try:
        while True:
            try:
                loop += 1
                count += 1
                logger.info("Loop: " + str(loop) + ' at ' + str(datetime.now().strftime(formatdt)))
                loop_open = datetime.now()
                ### DO THINGS AND STUFF HERE ###
                if doPending:
                    check_pending(timeback=timeback, wordmin=wordmin, scoremin=scoremin)
                    if tocheckApprove:
                        ApprovePending()
                    if tocheckRemove:
                        RemovePending()
                if doMessages:
                    mhelper = MessageHelper(reddit, backmsgs=False)
                    if len(mhelper.sewingmessages) >= 1:
                        mhelper.processMessages()
                scan_posts(poscnt=poscnt, wordmin=wordmin, riscore=riscore, v=v)
                scan_comments(reddit, comlmt=comlmt, grablinks=True, verbose=True, wordmin=wordmin)
                process_modlog(reddit=reddit, cnt=160, interval=150, verbose=False)
                loop_duration = (datetime.now() - loop_open).seconds
                logger.info("End of loop {}. Loop took {} to run.".format(loop, prettytime(loop_duration)))
            except IntegrityError:
                pass  # duplicate-row races are expected; skip and continue
            except OperationalError:
                logger.debug("Operational Error: Trying to re-connect to database")
                sqldb.close()
                sqldb.connect()
            except Exception:
                # NOTE(review): both branches log identically; the
                # CONNECTION_ERRORS split presumably anticipated different
                # handling — confirm intent before collapsing or changing it.
                error_entry = traceback.format_exc()
                if any(keyword in error_entry for keyword in CONNECTION_ERRORS):
                    logger.debug(error_entry)
                else:
                    logger.debug(error_entry)
            if not forever:
                break
            subscribers = get_subscribers()
            TOSLEEP = calculate_sleep(subscribers)
            SLEEPY = TOSLEEP * 2
            now = time.time()
            # BUGFIX: the loop actually sleeps SLEEPY seconds, but the logged
            # next-run time was computed from TOSLEEP (half the real wait).
            nextrun = datetime.fromtimestamp(now + SLEEPY)
            prettyruntime = nextrun.strftime(formatdt)
            logger.info(f"Sleeping for {SLEEPY} seconds - next run at: {prettyruntime}")
            time.sleep(SLEEPY)
    except KeyboardInterrupt:
        print("Ran for {}".format(human(b00p_start)))
def get_wikipageList():
    """Return the names of all wiki pages on the subreddit (possibly empty).

    The original's trailing `pagelist if pagelist is not None else None` was a
    dead guard: the accumulator was always a list, never None, so an empty
    wiki already returned []. A comprehension keeps that behavior.
    """
    return [wikipage.name for wikipage in sewsub.wiki]
def get_wikipageContent(wikipagename):
    """Return the markdown source of the named subreddit wiki page."""
    return sewsub.wiki[wikipagename].content_md
if __name__ == "__main__":
    # Production entry point: run forever with slightly tighter limits than
    # the function defaults (fewer posts/comments per pass, higher bars).
    run(doPending=True, doMessages=True, forever=True, timeback=18, wordmin=50, scoremin=50, poscnt=150, comlmt=400,
        riscore=900, v=True)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment